diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile deleted file mode 100644 index 5e7188d..0000000 --- a/.devcontainer/Dockerfile +++ /dev/null @@ -1,13 +0,0 @@ -FROM mcr.microsoft.com/devcontainers/go:1-1.21-bullseye - -# [Optional] Uncomment this section to install additional OS packages. -# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ -# && apt-get -y install --no-install-recommends - -# [Optional] Uncomment the next lines to use go get to install anything else you need -# USER vscode -# RUN go get -x -# USER root - -# [Optional] Uncomment this line to install global node packages. -# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g " 2>&1 diff --git a/.devcontainer/app.env b/.devcontainer/app.env deleted file mode 100644 index 8bc887e..0000000 --- a/.devcontainer/app.env +++ /dev/null @@ -1,8 +0,0 @@ -POSTGRES_HOST=localhost -POSTGRES_USER=postgres -POSTGRES_PASSWORD=postgres -POSTGRES_DB=postgres -POSTGRES_PORT=5432 - -PORT=8000 -CLIENT_ORIGIN=http://localhost:3000 \ No newline at end of file diff --git a/.devcontainer/dev/.helmignore b/.devcontainer/dev/.helmignore new file mode 100644 index 0000000..0e8a0eb --- /dev/null +++ b/.devcontainer/dev/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. 
+.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/.devcontainer/dev/Chart.yaml b/.devcontainer/dev/Chart.yaml new file mode 100644 index 0000000..99c630b --- /dev/null +++ b/.devcontainer/dev/Chart.yaml @@ -0,0 +1,14 @@ +apiVersion: v2 +name: ark-dev +description: Dev Helm chart for Ark developer environment +type: application +version: 0.0.0 +appVersion: 1.20.2 + +# Bitnami charts are served from GitHub CDN - See https://github.com/bitnami/charts/issues/10539 for details +dependencies: + # Chart release date: 2023-07 (https://github.com/bitnami/charts/blob/main/bitnami/postgresql/Chart.yaml) + - name: postgresql + repository: oci://registry-1.docker.io/bitnamicharts + version: 12.6.6 + condition: postgresql.enabled \ No newline at end of file diff --git a/.devcontainer/dev/charts/postgresql-12.6.6.tgz b/.devcontainer/dev/charts/postgresql-12.6.6.tgz new file mode 100644 index 0000000..0837ffe Binary files /dev/null and b/.devcontainer/dev/charts/postgresql-12.6.6.tgz differ diff --git a/.devcontainer/dev/templates/configmap.yml b/.devcontainer/dev/templates/configmap.yml new file mode 100644 index 0000000..d20d15d --- /dev/null +++ b/.devcontainer/dev/templates/configmap.yml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ .Release.Name }}-configmap \ No newline at end of file diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 816ba3c..170b240 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,23 +1,45 @@ // For format details, see https://aka.ms/devcontainer.json. 
For config options, see the -// README at: https://github.com/devcontainers/templates/tree/main/src/go-postgres +// README at: https://github.com/devcontainers/templates/tree/main/src/kubernetes-helm-minikube { - "name": "Go & PostgreSQL", - "dockerComposeFile": "docker-compose.yml", - "service": "app", - "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}" - - // Features to add to the dev container. More info: https://containers.dev/features. - // "features": {}, - - // Configure tool-specific properties. - // "customizations": {}, + "name": "ark-devcontainer", + // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile + "image": "mcr.microsoft.com/devcontainers/base:bullseye", + "features": { + "ghcr.io/devcontainers/features/docker-in-docker:2": { + "enableNonRootDocker": "true", + "moby": "true" + }, + "ghcr.io/devcontainers/features/kubectl-helm-minikube:1": { + "version": "latest", + "helm": "latest", + "minikube": "latest" + }, + "ghcr.io/devcontainers/features/go:1": { + "version": "latest", + "golangciLintVersion": "latest" + }, + "ghcr.io/mpriscella/features/kind:1": { + "version": "latest" + } + }, + + "extensions": ["golang.go","premparihar.gotestexplorer","github.vscode-pull-request-github","eamodio.gitlens"], + // Use 'forwardPorts' to make a list of ports inside the container available locally. - // "forwardPorts": [5432], + // "forwardPorts": [], // Use 'postCreateCommand' to run commands after the container is created. - // "postCreateCommand": "go version", + // "postCreateCommand": "kubectl version", + + // Use 'postStartCommand' to run commands after the container is created like starting minikube. + "postStartCommand": "kind create cluster && helm install dev .devcontainer/dev", + // "postStartCommand": "nohup bash -c 'minikube start &' > minikube.log 2>&1 && helm install dev .devcontainer/dev", + + // Configure tool-specific properties. 
+ // "customizations": {}, // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. // "remoteUser": "root" } + diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml deleted file mode 100644 index de06eed..0000000 --- a/.devcontainer/docker-compose.yml +++ /dev/null @@ -1,38 +0,0 @@ -version: '3.8' - -volumes: - postgres-data: - -services: - app: - build: - context: . - dockerfile: Dockerfile - env_file: - # Ensure that the variables in .env match the same variables in devcontainer.json - - .env - - volumes: - - ../..:/workspaces:cached - - # Overrides default command so things don't shut down after the process ends. - command: sleep infinity - - # Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function. - network_mode: service:db - - # Use "forwardPorts" in **devcontainer.json** to forward an app port locally. - # (Adding the "ports" property to this file will not forward from a Codespace.) - - db: - image: postgres:15-bullseye - restart: unless-stopped - volumes: - - postgres-data:/var/lib/postgresql/data - env_file: - # Ensure that the variables in .env match the same variables in devcontainer.json - - app.env - - - # Add "forwardPorts": ["5432"] to **devcontainer.json** to forward PostgreSQL locally. - # (Adding the "ports" property to this file will not forward from a Codespace.) 
diff --git a/LICENSE b/LICENSE index b423bb3..363ffa9 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright 2023 Timothy Stiles, Keoni Gandall +Copyright 2021 Keoni Gandall, Timothy Stiles Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: diff --git a/app/app.go b/app/app.go new file mode 100644 index 0000000..4879f7a --- /dev/null +++ b/app/app.go @@ -0,0 +1 @@ +package app diff --git a/client/client.go b/client/client.go new file mode 100644 index 0000000..da13c8e --- /dev/null +++ b/client/client.go @@ -0,0 +1 @@ +package client diff --git a/data/chembl_schema.sql b/data/chembl_schema.sql new file mode 100644 index 0000000..2a0df25 --- /dev/null +++ b/data/chembl_schema.sql @@ -0,0 +1,1107 @@ +CREATE TABLE irac_classification ( + irac_class_id BIGINT NOT NULL, + active_ingredient VARCHAR(500) NOT NULL, + level1 VARCHAR(1) NOT NULL, + level1_description VARCHAR(2000) NOT NULL, + level2 VARCHAR(3) NOT NULL, + level2_description VARCHAR(2000) NOT NULL, + level3 VARCHAR(6) NOT NULL, + level3_description VARCHAR(2000) NOT NULL, + level4 VARCHAR(8) NOT NULL, + irac_code VARCHAR(3) NOT NULL, + CONSTRAINT irac_classification_pk PRIMARY KEY (irac_class_id), + CONSTRAINT uk_irac_class_l4 UNIQUE (level4) +); +CREATE TABLE chembl_id_lookup ( + chembl_id VARCHAR(20) NOT NULL, + entity_type VARCHAR(50) NOT NULL, + entity_id BIGINT NOT NULL, + status VARCHAR(10) NOT NULL, + last_active INTEGER, + CONSTRAINT chembl_id_lookup_pk PRIMARY KEY (chembl_id), + CONSTRAINT ck_chembl_id_lookup_status CHECK (status in ('ACTIVE','INACTIVE','OBS')), + CONSTRAINT chembl_id_lookup_uk UNIQUE (entity_type, 
entity_id) +); +CREATE TABLE atc_classification ( + who_name VARCHAR(2000), + level1 VARCHAR(10), + level2 VARCHAR(10), + level3 VARCHAR(10), + level4 VARCHAR(10), + level5 VARCHAR(10) NOT NULL, + level1_description VARCHAR(2000), + level2_description VARCHAR(2000), + level3_description VARCHAR(2000), + level4_description VARCHAR(2000), + CONSTRAINT pk_atc_code PRIMARY KEY (level5) +); +CREATE TABLE usan_stems ( + usan_stem_id BIGINT NOT NULL, + stem VARCHAR(100) NOT NULL, + subgroup VARCHAR(100) NOT NULL, + annotation VARCHAR(2000), + stem_class VARCHAR(100), + major_class VARCHAR(100), + who_extra SMALLINT, + CONSTRAINT pk_usan_stems PRIMARY KEY (usan_stem_id), + CONSTRAINT ck_usan_stems_class CHECK (stem_class in ('Suffix','Prefix','Infix')), + CONSTRAINT ck_usan_stems_mc CHECK (major_class in ('GPCR','NR','PDE','kinase','ion channel','protease')), + CONSTRAINT ck_usan_stems_who CHECK (who_extra in (0,1)), + CONSTRAINT uk_usan_stems_stemsub UNIQUE (stem, subgroup) +); +CREATE TABLE organism_class ( + oc_id BIGINT NOT NULL, + tax_id BIGINT, + l1 VARCHAR(200), + l2 VARCHAR(200), + l3 VARCHAR(200), + CONSTRAINT pk_orgclass_oc_id PRIMARY KEY (oc_id), + CONSTRAINT uk_orgclass_tax_id UNIQUE (tax_id) +); +CREATE TABLE data_validity_lookup ( + data_validity_comment VARCHAR(30) NOT NULL, + description VARCHAR(200), + CONSTRAINT sys_c00117330 PRIMARY KEY (data_validity_comment) +); +CREATE TABLE assay_type ( + assay_type VARCHAR(1) NOT NULL, + assay_desc VARCHAR(250), + CONSTRAINT pk_assaytype_assay_type PRIMARY KEY (assay_type) +); +CREATE TABLE confidence_score_lookup ( + confidence_score SMALLINT NOT NULL, + description VARCHAR(100) NOT NULL, + target_mapping VARCHAR(30) NOT NULL, + CONSTRAINT confidence_score_lookup_pk PRIMARY KEY (confidence_score) +); +CREATE TABLE curation_lookup ( + curated_by VARCHAR(32) NOT NULL, + description VARCHAR(100) NOT NULL, + CONSTRAINT pf_curlu_cur_by PRIMARY KEY (curated_by) +); +CREATE TABLE source ( + src_id INTEGER NOT NULL, + 
src_description VARCHAR(500), + src_short_name VARCHAR(20), + CONSTRAINT pk_source_src_id PRIMARY KEY (src_id) +); +CREATE TABLE relationship_type ( + relationship_type VARCHAR(1) NOT NULL, + relationship_desc VARCHAR(250), + CONSTRAINT pk_reltype_relationship_type PRIMARY KEY (relationship_type) +); +CREATE TABLE target_type ( + target_type VARCHAR(30) NOT NULL, + target_desc VARCHAR(250), + parent_type VARCHAR(25), + CONSTRAINT pk_targtype_target_type PRIMARY KEY (target_type) +); +CREATE TABLE variant_sequences ( + variant_id BIGINT NOT NULL, + mutation VARCHAR(2000), + accession VARCHAR(25), + version BIGINT, + isoform BIGINT, + sequence TEXT, + organism VARCHAR(200), + tax_id BIGINT, + CONSTRAINT pk_varseq_variant_id PRIMARY KEY (variant_id), + CONSTRAINT uk_varseq_mut_acc UNIQUE (mutation, accession) +); +CREATE TABLE bioassay_ontology ( + bao_id VARCHAR(11) NOT NULL, + label VARCHAR(100) NOT NULL, + CONSTRAINT bioassay_ontology_pk PRIMARY KEY (bao_id) +); +CREATE TABLE action_type ( + action_type VARCHAR(50) NOT NULL, + description VARCHAR(200) NOT NULL, + parent_type VARCHAR(50), + CONSTRAINT action_type_pk PRIMARY KEY (action_type) +); +CREATE TABLE frac_classification ( + frac_class_id BIGINT NOT NULL, + active_ingredient VARCHAR(500) NOT NULL, + level1 VARCHAR(2) NOT NULL, + level1_description VARCHAR(2000) NOT NULL, + level2 VARCHAR(2) NOT NULL, + level2_description VARCHAR(2000), + level3 VARCHAR(6) NOT NULL, + level3_description VARCHAR(2000), + level4 VARCHAR(7) NOT NULL, + level4_description VARCHAR(2000), + level5 VARCHAR(8) NOT NULL, + frac_code VARCHAR(4) NOT NULL, + CONSTRAINT frac_classification_pk PRIMARY KEY (frac_class_id), + CONSTRAINT uk_frac_class_l5 UNIQUE (level5) +); +CREATE TABLE activity_smid ( + smid BIGINT NOT NULL, + CONSTRAINT pk_actsamid PRIMARY KEY (smid) +); +CREATE TABLE component_sequences ( + component_id BIGINT NOT NULL, + component_type VARCHAR(50), + accession VARCHAR(25), + sequence TEXT, + sequence_md5sum VARCHAR(32), 
+ description VARCHAR(200), + tax_id BIGINT, + organism VARCHAR(150), + db_source VARCHAR(25), + db_version VARCHAR(10), + CONSTRAINT pk_targcomp_seqs_compid PRIMARY KEY (component_id), + CONSTRAINT ck_targcomp_seqs_src CHECK (db_source in ('SWISS-PROT','TREMBL','Manual')), + CONSTRAINT ck_targcomp_seqs_taxid CHECK (tax_id > 0), + CONSTRAINT ck_targcomp_seqs_type CHECK (component_type in ('PROTEIN','DNA','RNA')), + CONSTRAINT uk_targcomp_seqs_acc UNIQUE (accession) +); +CREATE TABLE protein_classification ( + protein_class_id BIGINT NOT NULL, + parent_id BIGINT, + pref_name VARCHAR(500), + short_name VARCHAR(50), + protein_class_desc VARCHAR(410) NOT NULL, + definition VARCHAR(4000), + class_level BIGINT NOT NULL, + CONSTRAINT prot_class_pk PRIMARY KEY (protein_class_id), + CONSTRAINT ck_prot_class_level CHECK (class_level >= 0 and class_level <= 10) +); +CREATE TABLE bio_component_sequences ( + component_id BIGINT NOT NULL, + component_type VARCHAR(50) NOT NULL, + description VARCHAR(200), + sequence TEXT, + sequence_md5sum VARCHAR(32), + tax_id BIGINT, + organism VARCHAR(150), + CONSTRAINT pk_biocomp_seqs_compid PRIMARY KEY (component_id) +); +CREATE TABLE go_classification ( + go_id VARCHAR(10) NOT NULL, + parent_go_id VARCHAR(10), + pref_name VARCHAR(200), + class_level SMALLINT, + aspect VARCHAR(1), + path VARCHAR(1000), + CONSTRAINT go_classification_pk PRIMARY KEY (go_id) +); +CREATE TABLE assay_classification ( + assay_class_id BIGINT NOT NULL, + l1 VARCHAR(100), + l2 VARCHAR(100), + l3 VARCHAR(1000), + class_type VARCHAR(50), + source VARCHAR(50), + CONSTRAINT pk_assay_class PRIMARY KEY (assay_class_id), + CONSTRAINT uk_assay_class_l3 UNIQUE (l3) +); +CREATE TABLE structural_alert_sets ( + alert_set_id BIGINT NOT NULL, + set_name VARCHAR(100) NOT NULL, + priority SMALLINT NOT NULL, + CONSTRAINT pk_str_alert_set_id PRIMARY KEY (alert_set_id), + CONSTRAINT uk_str_alert_name UNIQUE (set_name) +); +CREATE TABLE products ( + dosage_form VARCHAR(200), + route 
VARCHAR(200), + trade_name VARCHAR(200), + approval_date DATETIME, + ad_type VARCHAR(5), + oral SMALLINT, + topical SMALLINT, + parenteral SMALLINT, + black_box_warning SMALLINT, + applicant_full_name VARCHAR(200), + innovator_company SMALLINT, + product_id VARCHAR(30) NOT NULL, + nda_type VARCHAR(10), + CONSTRAINT pk_products_id PRIMARY KEY (product_id), + CONSTRAINT ck_products_adtype CHECK (ad_type in ('OTC','RX','DISCN')), + CONSTRAINT ck_products_bbw CHECK (black_box_warning in (0,1)), + CONSTRAINT ck_products_inn CHECK (innovator_company in (0,1)), + CONSTRAINT ck_products_nda CHECK (NDA_TYPE in ('N','A')), + CONSTRAINT ck_products_oral CHECK (oral in (0,1)), + CONSTRAINT ck_products_par CHECK (parenteral in (0,1)), + CONSTRAINT ck_products_top CHECK (topical in (0,1)) +); +CREATE TABLE patent_use_codes ( + patent_use_code VARCHAR(8) NOT NULL, + definition VARCHAR(500) NOT NULL, + CONSTRAINT patent_use_codes_pk PRIMARY KEY (patent_use_code), + CONSTRAINT ck_patent_use_code CHECK (patent_use_code like ('U-%')) +); +CREATE TABLE research_stem ( + res_stem_id BIGINT NOT NULL, + research_stem VARCHAR(20), + CONSTRAINT pk_res_stem_id PRIMARY KEY (res_stem_id), + CONSTRAINT uk_res_stem UNIQUE (research_stem) +); +CREATE TABLE hrac_classification ( + hrac_class_id BIGINT NOT NULL, + active_ingredient VARCHAR(500) NOT NULL, + level1 VARCHAR(2) NOT NULL, + level1_description VARCHAR(2000) NOT NULL, + level2 VARCHAR(3) NOT NULL, + level2_description VARCHAR(2000), + level3 VARCHAR(5) NOT NULL, + hrac_code VARCHAR(2) NOT NULL, + CONSTRAINT hrac_classification_pk PRIMARY KEY (hrac_class_id), + CONSTRAINT uk_hrac_class_l3 UNIQUE (level3) +); +CREATE TABLE protein_family_classification ( + protein_class_id BIGINT NOT NULL, + protein_class_desc VARCHAR(810) NOT NULL, + l1 VARCHAR(100) NOT NULL, + l2 VARCHAR(100), + l3 VARCHAR(100), + l4 VARCHAR(100), + l5 VARCHAR(100), + l6 VARCHAR(100), + l7 VARCHAR(100), + l8 VARCHAR(100), + CONSTRAINT protein_class_pk PRIMARY KEY 
(protein_class_id), + CONSTRAINT uk_protclass_desc UNIQUE (protein_class_desc), + CONSTRAINT uk_protclass_levels UNIQUE (l1, l2, l3, l4, l5, l6, l7, l8) +); +CREATE TABLE domains ( + domain_id BIGINT NOT NULL, + domain_type VARCHAR(20) NOT NULL, + source_domain_id VARCHAR(20) NOT NULL, + domain_name VARCHAR(20), + domain_description VARCHAR(500), + CONSTRAINT pk_domain_id PRIMARY KEY (domain_id), + CONSTRAINT ck_domain_type CHECK (domain_type in ('Pfam-A','Pfam-B')) +); +CREATE TABLE version ( + name VARCHAR(20) NOT NULL, + creation_date DATETIME, + comments VARCHAR(2000), + CONSTRAINT pk_version_name PRIMARY KEY (name) +); +CREATE TABLE activity_stds_lookup ( + std_act_id BIGINT NOT NULL, + standard_type VARCHAR(250) NOT NULL, + definition VARCHAR(500), + standard_units VARCHAR(100) NOT NULL, + normal_range_min NUMERIC(24, 12), + normal_range_max NUMERIC(24, 12), + CONSTRAINT pk_actstds_stdactid PRIMARY KEY (std_act_id), + CONSTRAINT uk_actstds_typeunits UNIQUE (standard_type, standard_units) +); +CREATE TABLE molecule_dictionary ( + molregno BIGINT NOT NULL, + pref_name VARCHAR(255), + chembl_id VARCHAR(20) NOT NULL, + max_phase SMALLINT NOT NULL, + therapeutic_flag SMALLINT NOT NULL, + dosed_ingredient SMALLINT NOT NULL, + structure_type VARCHAR(10) NOT NULL, + chebi_par_id BIGINT, + molecule_type VARCHAR(30), + first_approval INTEGER, + oral SMALLINT NOT NULL, + parenteral SMALLINT NOT NULL, + topical SMALLINT NOT NULL, + black_box_warning SMALLINT NOT NULL, + natural_product SMALLINT NOT NULL, + first_in_class SMALLINT NOT NULL, + chirality SMALLINT NOT NULL, + prodrug SMALLINT NOT NULL, + inorganic_flag SMALLINT NOT NULL, + usan_year INTEGER, + availability_type SMALLINT, + usan_stem VARCHAR(50), + polymer_flag SMALLINT, + usan_substem VARCHAR(50), + usan_stem_definition VARCHAR(1000), + indication_class VARCHAR(1000), + withdrawn_flag SMALLINT NOT NULL, + withdrawn_year INTEGER, + withdrawn_country VARCHAR(1000), + withdrawn_reason VARCHAR(1000), + 
withdrawn_class VARCHAR(500), + CONSTRAINT pk_moldict_molregno PRIMARY KEY (molregno), + CONSTRAINT fk_moldict_chembl_id FOREIGN KEY(chembl_id) REFERENCES chembl_id_lookup (chembl_id) ON DELETE CASCADE, + CONSTRAINT ck_moldict_app CHECK (first_approval < 2050 and first_approval > 1900), + CONSTRAINT ck_moldict_bbw CHECK (black_box_warning in (-1,0,1)), + CONSTRAINT ck_moldict_chi CHECK (chirality in (-1,0,1,2)), + CONSTRAINT ck_moldict_dosed CHECK (dosed_ingredient in (0,1)), + CONSTRAINT ck_moldict_fic CHECK (first_in_class in (-1,0,1)), + CONSTRAINT ck_moldict_inor CHECK (inorganic_flag in (-1,0,1)), + CONSTRAINT ck_moldict_np CHECK (natural_product in (-1,0,1)), + CONSTRAINT ck_moldict_oral CHECK (oral in (0,1)), + CONSTRAINT ck_moldict_par CHECK (parenteral in (0,1)), + CONSTRAINT ck_moldict_phase CHECK (max_phase in (0,1,2,3,4)), + CONSTRAINT ck_moldict_polyflag CHECK (polymer_flag IN (0, 1, null)), + CONSTRAINT ck_moldict_pro CHECK (prodrug in (-1,0,1)), + CONSTRAINT ck_moldict_strtype CHECK (structure_type in ('NONE','MOL','SEQ','BOTH')), + CONSTRAINT ck_moldict_theraflag CHECK (therapeutic_flag IN (0, 1)), + CONSTRAINT ck_moldict_top CHECK (topical in (0,1)), + CONSTRAINT ck_moldict_usanyear CHECK (usan_year > 1900 and usan_year < 2050), + CONSTRAINT ck_moldict_withd CHECK (WITHDRAWN_FLAG in (0,1)), + CONSTRAINT uk_moldict_chemblid UNIQUE (chembl_id) +); +CREATE TABLE defined_daily_dose ( + atc_code VARCHAR(10) NOT NULL, + ddd_units VARCHAR(200), + ddd_admr VARCHAR(1000), + ddd_comment VARCHAR(2000), + ddd_id BIGINT NOT NULL, + ddd_value NUMERIC, + CONSTRAINT pk_ddd_id PRIMARY KEY (ddd_id), + CONSTRAINT fk_ddd_atccode FOREIGN KEY(atc_code) REFERENCES atc_classification (level5) ON DELETE CASCADE +); +CREATE TABLE cell_dictionary ( + cell_id BIGINT NOT NULL, + cell_name VARCHAR(50) NOT NULL, + cell_description VARCHAR(200), + cell_source_tissue VARCHAR(50), + cell_source_organism VARCHAR(150), + cell_source_tax_id BIGINT, + clo_id VARCHAR(11), + efo_id 
VARCHAR(12), + cellosaurus_id VARCHAR(15), + cl_lincs_id VARCHAR(8), + chembl_id VARCHAR(20), + cell_ontology_id VARCHAR(10), + CONSTRAINT pk_celldict_cellid PRIMARY KEY (cell_id), + CONSTRAINT fk_celldict_chembl_id FOREIGN KEY(chembl_id) REFERENCES chembl_id_lookup (chembl_id) ON DELETE CASCADE, + CONSTRAINT ck_cell_dict_lincs CHECK (CL_LINCS_ID like ('LCL-%')), + CONSTRAINT uk_celldict UNIQUE (cell_name, cell_source_tax_id), + CONSTRAINT uk_cell_chembl_id UNIQUE (chembl_id) +); +CREATE TABLE docs ( + doc_id BIGINT NOT NULL, + journal VARCHAR(50), + year INTEGER, + volume VARCHAR(50), + issue VARCHAR(50), + first_page VARCHAR(50), + last_page VARCHAR(50), + pubmed_id BIGINT, + doi VARCHAR(100), + chembl_id VARCHAR(20) NOT NULL, + title VARCHAR(500), + doc_type VARCHAR(50) NOT NULL, + authors VARCHAR(4000), + abstract TEXT, + patent_id VARCHAR(20), + ridx VARCHAR(200) NOT NULL, + src_id INTEGER NOT NULL, + CONSTRAINT pk_docs_doc_id PRIMARY KEY (doc_id), + CONSTRAINT fk_docs_chembl_id FOREIGN KEY(chembl_id) REFERENCES chembl_id_lookup (chembl_id) ON DELETE CASCADE, + CONSTRAINT fk_docs_src_id FOREIGN KEY(src_id) REFERENCES source (src_id) ON DELETE CASCADE, + CONSTRAINT ck_docs_chemblid CHECK (chembl_id like ('CHEMBL%')), + CONSTRAINT ck_docs_doctype CHECK (doc_type in ('PUBLICATION','BOOK','DATASET','PATENT')), + CONSTRAINT ck_docs_year CHECK (year < 2050 and year > 1900), + CONSTRAINT uk_docs_chemblid UNIQUE (chembl_id) +); +CREATE TABLE target_dictionary ( + tid BIGINT NOT NULL, + target_type VARCHAR(30), + pref_name VARCHAR(200) NOT NULL, + tax_id BIGINT, + organism VARCHAR(150), + chembl_id VARCHAR(20) NOT NULL, + species_group_flag SMALLINT NOT NULL, + CONSTRAINT pk_targdict_tid PRIMARY KEY (tid), + CONSTRAINT fk_targdict_chembl_id FOREIGN KEY(chembl_id) REFERENCES chembl_id_lookup (chembl_id) ON DELETE CASCADE, + CONSTRAINT fk_targdict_target_type FOREIGN KEY(target_type) REFERENCES target_type (target_type) ON DELETE CASCADE, + CONSTRAINT ck_targdict_species 
CHECK (species_group_flag in (0,1)), + CONSTRAINT uk_targdict_chemblid UNIQUE (chembl_id) +); +CREATE TABLE tissue_dictionary ( + tissue_id BIGINT NOT NULL, + uberon_id VARCHAR(15), + pref_name VARCHAR(200) NOT NULL, + efo_id VARCHAR(20), + chembl_id VARCHAR(20) NOT NULL, + bto_id VARCHAR(20), + caloha_id VARCHAR(7), + CONSTRAINT pk_tissue_dict_tissue_id PRIMARY KEY (tissue_id), + CONSTRAINT fk_tissue_chembl_id FOREIGN KEY(chembl_id) REFERENCES chembl_id_lookup (chembl_id) ON DELETE CASCADE, + CONSTRAINT ck_tissue_uberon_id CHECK (uberon_id like ('UBERON:%')), + CONSTRAINT uk_tissue_chembl_id UNIQUE (chembl_id), + CONSTRAINT uk_tissue_dict_uberon_efo UNIQUE (uberon_id, efo_id), + CONSTRAINT uk_tissue_pref_name UNIQUE (pref_name) +); +CREATE TABLE activity_supp ( + as_id BIGINT NOT NULL, + rgid BIGINT NOT NULL, + smid BIGINT, + type VARCHAR(250) NOT NULL, + relation VARCHAR(50), + value NUMERIC, + units VARCHAR(100), + text_value VARCHAR(1000), + standard_type VARCHAR(250), + standard_relation VARCHAR(50), + standard_value NUMERIC, + standard_units VARCHAR(100), + standard_text_value VARCHAR(1000), + comments VARCHAR(4000), + CONSTRAINT pk_actsupp_as_id PRIMARY KEY (as_id), + CONSTRAINT fk_act_smids FOREIGN KEY(smid) REFERENCES activity_smid (smid) ON DELETE CASCADE, + CONSTRAINT uk_actsupp_rgid_type UNIQUE (rgid, type) +); +CREATE TABLE component_class ( + component_id BIGINT NOT NULL, + protein_class_id BIGINT NOT NULL, + comp_class_id BIGINT NOT NULL, + CONSTRAINT pk_comp_class_id PRIMARY KEY (comp_class_id), + CONSTRAINT fk_comp_class_compid FOREIGN KEY(component_id) REFERENCES component_sequences (component_id) ON DELETE CASCADE, + CONSTRAINT fk_comp_class_pcid FOREIGN KEY(protein_class_id) REFERENCES protein_classification (protein_class_id) ON DELETE CASCADE, + CONSTRAINT uk_comp_class UNIQUE (component_id, protein_class_id) +); +CREATE TABLE protein_class_synonyms ( + protclasssyn_id BIGINT NOT NULL, + protein_class_id BIGINT NOT NULL, + 
protein_class_synonym VARCHAR(1000), + syn_type VARCHAR(20), + CONSTRAINT pk_protclasssyn_synid PRIMARY KEY (protclasssyn_id), + CONSTRAINT fk_protclasssyn_protclass_id FOREIGN KEY(protein_class_id) REFERENCES protein_classification (protein_class_id) ON DELETE CASCADE, + CONSTRAINT ck_protclasssyn_syntype CHECK (syn_type in ('CHEMBL','CONCEPT_WIKI','UMLS','CW_XREF','MESH_XREF')), + CONSTRAINT uk_protclasssyn UNIQUE (protein_class_id, protein_class_synonym, syn_type) +); +CREATE TABLE structural_alerts ( + alert_id BIGINT NOT NULL, + alert_set_id BIGINT NOT NULL, + alert_name VARCHAR(100) NOT NULL, + smarts VARCHAR(4000) NOT NULL, + CONSTRAINT pk_str_alert_id PRIMARY KEY (alert_id), + CONSTRAINT fk_str_alert_set_id FOREIGN KEY(alert_set_id) REFERENCES structural_alert_sets (alert_set_id) ON DELETE CASCADE, + CONSTRAINT uk_str_alert_smarts UNIQUE (alert_set_id, alert_name, smarts) +); +CREATE TABLE product_patents ( + prod_pat_id BIGINT NOT NULL, + product_id VARCHAR(30) NOT NULL, + patent_no VARCHAR(20) NOT NULL, + patent_expire_date DATETIME NOT NULL, + drug_substance_flag SMALLINT NOT NULL, + drug_product_flag SMALLINT NOT NULL, + patent_use_code VARCHAR(10), + delist_flag SMALLINT NOT NULL, + submission_date DATETIME, + CONSTRAINT pk_prod_pat_id PRIMARY KEY (prod_pat_id), + CONSTRAINT fk_prod_pat_product_id FOREIGN KEY(product_id) REFERENCES products (product_id) ON DELETE CASCADE, + CONSTRAINT fk_prod_pat_use_code FOREIGN KEY(patent_use_code) REFERENCES patent_use_codes (patent_use_code) ON DELETE CASCADE, + CONSTRAINT ck_patents_delistflag CHECK (delist_flag IN (0, 1)), + CONSTRAINT ck_patents_prodflag CHECK (drug_product_flag IN (0, 1)), + CONSTRAINT ck_patents_subsflag CHECK (drug_substance_flag IN (0, 1)), + CONSTRAINT uk_prod_pat UNIQUE (product_id, patent_no, patent_expire_date, patent_use_code) +); +CREATE TABLE component_synonyms ( + compsyn_id BIGINT NOT NULL, + component_id BIGINT NOT NULL, + component_synonym VARCHAR(500), + syn_type VARCHAR(20), + 
CONSTRAINT pk_compsyn_synid PRIMARY KEY (compsyn_id), + CONSTRAINT fk_compsyn_compid FOREIGN KEY(component_id) REFERENCES component_sequences (component_id) ON DELETE CASCADE, + CONSTRAINT ck_compsyn_syntype CHECK (syn_type in ('GENE_SYMBOL','GENE_SYMBOL_OTHER','UNIPROT','MANUAL','OTHER','EC_NUMBER')), + CONSTRAINT uk_compsyn UNIQUE (component_id, component_synonym, syn_type) +); +CREATE TABLE research_companies ( + co_stem_id BIGINT NOT NULL, + res_stem_id BIGINT, + company VARCHAR(100), + country VARCHAR(50), + previous_company VARCHAR(100), + CONSTRAINT pk_resco_co_stem_id PRIMARY KEY (co_stem_id), + CONSTRAINT fk_resco_res_stem_id FOREIGN KEY(res_stem_id) REFERENCES research_stem (res_stem_id) ON DELETE CASCADE, + CONSTRAINT uk_resco_stem_co UNIQUE (res_stem_id, company) +); +CREATE TABLE component_domains ( + compd_id BIGINT NOT NULL, + domain_id BIGINT, + component_id BIGINT NOT NULL, + start_position BIGINT, + end_position BIGINT, + CONSTRAINT pk_compd_id PRIMARY KEY (compd_id), + CONSTRAINT fk_compd_compid FOREIGN KEY(component_id) REFERENCES component_sequences (component_id) ON DELETE CASCADE, + CONSTRAINT fk_compd_domainid FOREIGN KEY(domain_id) REFERENCES domains (domain_id) ON DELETE CASCADE, + CONSTRAINT ck_compd_end CHECK (end_position > 0), + CONSTRAINT ck_compd_start CHECK (start_position > 0), + CONSTRAINT uk_compd_start UNIQUE (domain_id, component_id, start_position) +); +CREATE TABLE component_go ( + comp_go_id BIGINT NOT NULL, + component_id BIGINT NOT NULL, + go_id VARCHAR(10) NOT NULL, + CONSTRAINT pk_comp_go PRIMARY KEY (comp_go_id), + CONSTRAINT fk_comp_id FOREIGN KEY(component_id) REFERENCES component_sequences (component_id) ON DELETE CASCADE, + CONSTRAINT fk_go_id FOREIGN KEY(go_id) REFERENCES go_classification (go_id) ON DELETE CASCADE, + CONSTRAINT uk_comp_go UNIQUE (component_id, go_id) +); +CREATE TABLE molecule_irac_classification ( + mol_irac_id BIGINT NOT NULL, + irac_class_id BIGINT NOT NULL, + molregno BIGINT NOT NULL, + 
CONSTRAINT molecule_irac_classificationpk PRIMARY KEY (mol_irac_id), + CONSTRAINT fk_irac_class_id FOREIGN KEY(irac_class_id) REFERENCES irac_classification (irac_class_id) ON DELETE CASCADE, + CONSTRAINT fk_irac_molregno FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE, + CONSTRAINT uk_mol_irac_class UNIQUE (irac_class_id, molregno) +); +CREATE TABLE molecule_atc_classification ( + mol_atc_id BIGINT NOT NULL, + level5 VARCHAR(10) NOT NULL, + molregno BIGINT NOT NULL, + CONSTRAINT pk_molatc_mol_atc_id PRIMARY KEY (mol_atc_id), + CONSTRAINT fk_molatc_level5 FOREIGN KEY(level5) REFERENCES atc_classification (level5) ON DELETE CASCADE, + CONSTRAINT fk_molatc_molregno FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE +); +CREATE TABLE assays ( + assay_id BIGINT NOT NULL, + doc_id BIGINT NOT NULL, + description VARCHAR(4000), + assay_type VARCHAR(1), + assay_test_type VARCHAR(20), + assay_category VARCHAR(20), + assay_organism VARCHAR(250), + assay_tax_id BIGINT, + assay_strain VARCHAR(200), + assay_tissue VARCHAR(100), + assay_cell_type VARCHAR(100), + assay_subcellular_fraction VARCHAR(100), + tid BIGINT, + relationship_type VARCHAR(1), + confidence_score SMALLINT, + curated_by VARCHAR(32), + src_id INTEGER NOT NULL, + src_assay_id VARCHAR(50), + chembl_id VARCHAR(20) NOT NULL, + cell_id BIGINT, + bao_format VARCHAR(11), + tissue_id BIGINT, + variant_id BIGINT, + aidx VARCHAR(200) NOT NULL, + CONSTRAINT pk_assays_assay_id PRIMARY KEY (assay_id), + CONSTRAINT fk_assays_assaytype FOREIGN KEY(assay_type) REFERENCES assay_type (assay_type) ON DELETE CASCADE, + CONSTRAINT fk_assays_cell_id FOREIGN KEY(cell_id) REFERENCES cell_dictionary (cell_id) ON DELETE CASCADE, + CONSTRAINT fk_assays_chembl_id FOREIGN KEY(chembl_id) REFERENCES chembl_id_lookup (chembl_id) ON DELETE CASCADE, + CONSTRAINT fk_assays_confscore FOREIGN KEY(confidence_score) REFERENCES confidence_score_lookup (confidence_score) ON DELETE 
CASCADE, + CONSTRAINT fk_assays_cur_by FOREIGN KEY(curated_by) REFERENCES curation_lookup (curated_by) ON DELETE CASCADE, + CONSTRAINT fk_assays_doc_id FOREIGN KEY(doc_id) REFERENCES docs (doc_id) ON DELETE CASCADE, + CONSTRAINT fk_assays_reltype FOREIGN KEY(relationship_type) REFERENCES relationship_type (relationship_type) ON DELETE CASCADE, + CONSTRAINT fk_assays_src_id FOREIGN KEY(src_id) REFERENCES source (src_id) ON DELETE CASCADE, + CONSTRAINT fk_assays_tid FOREIGN KEY(tid) REFERENCES target_dictionary (tid) ON DELETE CASCADE, + CONSTRAINT fk_assays_tissue_id FOREIGN KEY(tissue_id) REFERENCES tissue_dictionary (tissue_id) ON DELETE CASCADE, + CONSTRAINT fk_assays_variant_id FOREIGN KEY(variant_id) REFERENCES variant_sequences (variant_id) ON DELETE CASCADE, + CONSTRAINT fk_chembl_bao_format FOREIGN KEY(bao_format) REFERENCES bioassay_ontology (bao_id) ON DELETE CASCADE, + CONSTRAINT ck_assays_category CHECK (assay_category in ('screening','panel','confirmatory','summary','other')), + CONSTRAINT ck_assays_chemblid CHECK (chembl_id like ('CHEMBL%')), + CONSTRAINT ck_assays_testtype CHECK (assay_test_type in ('In vivo','In vitro','Ex vivo')), + CONSTRAINT uk_assays_chemblid UNIQUE (chembl_id) +); +CREATE TABLE compound_records ( + record_id BIGINT NOT NULL, + molregno BIGINT, + doc_id BIGINT NOT NULL, + compound_key VARCHAR(250), + compound_name VARCHAR(4000), + src_id INTEGER NOT NULL, + src_compound_id VARCHAR(150), + cidx VARCHAR(200) NOT NULL, + CONSTRAINT pk_cmpdrec_record_id PRIMARY KEY (record_id), + CONSTRAINT fk_cmpdrec_doc_id FOREIGN KEY(doc_id) REFERENCES docs (doc_id) ON DELETE CASCADE, + CONSTRAINT fk_cmpdrec_molregno FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE, + CONSTRAINT fk_cmpdrec_src_id FOREIGN KEY(src_id) REFERENCES source (src_id) ON DELETE CASCADE +); +CREATE TABLE binding_sites ( + site_id BIGINT NOT NULL, + site_name VARCHAR(200), + tid BIGINT, + CONSTRAINT pk_bindsite_id PRIMARY KEY (site_id), + 
CONSTRAINT fk_bindsite_tid FOREIGN KEY(tid) REFERENCES target_dictionary (tid) ON DELETE CASCADE +); +CREATE TABLE target_relations ( + tid BIGINT NOT NULL, + relationship VARCHAR(20) NOT NULL, + related_tid BIGINT NOT NULL, + targrel_id BIGINT NOT NULL, + CONSTRAINT target_relations_pk PRIMARY KEY (targrel_id), + CONSTRAINT fk_targrel_reltid FOREIGN KEY(related_tid) REFERENCES target_dictionary (tid) ON DELETE CASCADE, + CONSTRAINT fk_targrel_tid FOREIGN KEY(tid) REFERENCES target_dictionary (tid) ON DELETE CASCADE, + CONSTRAINT ck_targrel_rel CHECK (relationship in ('EQUIVALENT TO', 'OVERLAPS WITH', 'SUBSET OF', 'SUPERSET OF')) +); +CREATE TABLE compound_structures ( + molregno BIGINT NOT NULL, + molfile TEXT, + standard_inchi VARCHAR(4000), + standard_inchi_key VARCHAR(27) NOT NULL, + canonical_smiles VARCHAR(4000), + CONSTRAINT pk_cmpdstr_molregno PRIMARY KEY (molregno), + CONSTRAINT fk_cmpdstr_molregno FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE, + CONSTRAINT uk_cmpdstr_stdinch UNIQUE (standard_inchi), + CONSTRAINT uk_cmpdstr_stdinchkey UNIQUE (standard_inchi_key) +); +CREATE TABLE biotherapeutics ( + molregno BIGINT NOT NULL, + description VARCHAR(2000), + helm_notation VARCHAR(4000), + CONSTRAINT pk_biother_molregno PRIMARY KEY (molregno), + CONSTRAINT fk_biother_molregno FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE +); +CREATE TABLE compound_structural_alerts ( + cpd_str_alert_id BIGINT NOT NULL, + molregno BIGINT NOT NULL, + alert_id BIGINT NOT NULL, + CONSTRAINT pk_cpd_str_alert_id PRIMARY KEY (cpd_str_alert_id), + CONSTRAINT fk_cpd_str_alert_id FOREIGN KEY(alert_id) REFERENCES structural_alerts (alert_id) ON DELETE CASCADE, + CONSTRAINT fk_cpd_str_alert_molregno FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE, + CONSTRAINT uk_cpd_str_alert UNIQUE (molregno, alert_id) +); +CREATE TABLE molecule_hierarchy ( + molregno BIGINT NOT NULL, + 
parent_molregno BIGINT, + active_molregno BIGINT, + CONSTRAINT pk_molhier_molregno PRIMARY KEY (molregno), + CONSTRAINT fk_molhier_active_molregno FOREIGN KEY(active_molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE, + CONSTRAINT fk_molhier_molregno FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE, + CONSTRAINT fk_molhier_parent_molregno FOREIGN KEY(parent_molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE +); +CREATE TABLE molecule_synonyms ( + molregno BIGINT NOT NULL, + syn_type VARCHAR(50) NOT NULL, + molsyn_id BIGINT NOT NULL, + res_stem_id BIGINT, + synonyms VARCHAR(200), + CONSTRAINT pk_cmpdsyns_synid PRIMARY KEY (molsyn_id), + CONSTRAINT fk_cmpdsyns_molregno FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE, + CONSTRAINT fk_cmpdsyns_resstem FOREIGN KEY(res_stem_id) REFERENCES research_stem (res_stem_id) ON DELETE CASCADE, + CONSTRAINT uk_cmpdsyns UNIQUE (molregno, syn_type, synonyms) +); +CREATE TABLE molecule_hrac_classification ( + mol_hrac_id BIGINT NOT NULL, + hrac_class_id BIGINT NOT NULL, + molregno BIGINT NOT NULL, + CONSTRAINT molecule_hrac_classificationpk PRIMARY KEY (mol_hrac_id), + CONSTRAINT fk_hrac_class_id FOREIGN KEY(hrac_class_id) REFERENCES hrac_classification (hrac_class_id) ON DELETE CASCADE, + CONSTRAINT fk_hrac_molregno FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE, + CONSTRAINT uk_mol_hrac_class UNIQUE (hrac_class_id, molregno) +); +CREATE TABLE target_components ( + tid BIGINT NOT NULL, + component_id BIGINT NOT NULL, + targcomp_id BIGINT NOT NULL, + homologue SMALLINT NOT NULL, + CONSTRAINT pk_targcomp_id PRIMARY KEY (targcomp_id), + CONSTRAINT fk_targcomp_compid FOREIGN KEY(component_id) REFERENCES component_sequences (component_id) ON DELETE CASCADE, + CONSTRAINT fk_targcomp_tid FOREIGN KEY(tid) REFERENCES target_dictionary (tid) ON DELETE CASCADE, + CONSTRAINT ck_targcomp_hom CHECK (homologue 
in (0,1,2)), + CONSTRAINT uk_targcomp_tid_compid UNIQUE (tid, component_id) +); +CREATE TABLE compound_properties ( + molregno BIGINT NOT NULL, + mw_freebase NUMERIC(9, 2), + alogp NUMERIC(9, 2), + hba INTEGER, + hbd INTEGER, + psa NUMERIC(9, 2), + rtb INTEGER, + ro3_pass VARCHAR(3), + num_ro5_violations SMALLINT, + cx_most_apka NUMERIC(9, 2), + cx_most_bpka NUMERIC(9, 2), + cx_logp NUMERIC(9, 2), + cx_logd NUMERIC(9, 2), + molecular_species VARCHAR(50), + full_mwt NUMERIC(9, 2), + aromatic_rings INTEGER, + heavy_atoms INTEGER, + qed_weighted NUMERIC(3, 2), + mw_monoisotopic NUMERIC(11, 4), + full_molformula VARCHAR(100), + hba_lipinski INTEGER, + hbd_lipinski INTEGER, + num_lipinski_ro5_violations SMALLINT, + CONSTRAINT pk_cmpdprop_molregno PRIMARY KEY (molregno), + CONSTRAINT fk_cmpdprop_molregno FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE, + CONSTRAINT ck_cmpdprop_aromatic CHECK (aromatic_rings >= 0), + CONSTRAINT ck_cmpdprop_bpka CHECK (CX_MOST_BPKA>=0), + CONSTRAINT ck_cmpdprop_fullmw CHECK (full_mwt > 0), + CONSTRAINT ck_cmpdprop_hba CHECK (hba >= 0), + CONSTRAINT ck_cmpdprop_hba_lip CHECK (hba_lipinski >= 0), + CONSTRAINT ck_cmpdprop_hbd CHECK (hbd >= 0), + CONSTRAINT ck_cmpdprop_hbd_lip CHECK (hbd_lipinski >= 0), + CONSTRAINT ck_cmpdprop_heavy CHECK (heavy_atoms >= 0), + CONSTRAINT ck_cmpdprop_lip_ro5 CHECK (num_lipinski_ro5_violations in (0,1,2,3,4)), + CONSTRAINT ck_cmpdprop_mwfree CHECK (mw_freebase > 0), + CONSTRAINT ck_cmpdprop_psa CHECK (psa >= 0), + CONSTRAINT ck_cmpdprop_qed CHECK (qed_weighted >= 0), + CONSTRAINT ck_cmpdprop_ro3 CHECK (ro3_pass in ('Y','N')), + CONSTRAINT ck_cmpdprop_ro5 CHECK (num_ro5_violations in (0,1,2,3,4)), + CONSTRAINT ck_cmpdprop_rtb CHECK (rtb >= 0), + CONSTRAINT ck_cmpdprop_species CHECK (molecular_species in ('ACID','BASE','ZWITTERION','NEUTRAL')) +); +CREATE TABLE molecule_frac_classification ( + mol_frac_id BIGINT NOT NULL, + frac_class_id BIGINT NOT NULL, + molregno BIGINT NOT 
NULL, + CONSTRAINT molecule_frac_classificationpk PRIMARY KEY (mol_frac_id), + CONSTRAINT fk_frac_class_id FOREIGN KEY(frac_class_id) REFERENCES frac_classification (frac_class_id) ON DELETE CASCADE, + CONSTRAINT fk_frac_molregno FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE, + CONSTRAINT uk_mol_frac_class UNIQUE (frac_class_id, molregno) +); +CREATE TABLE drug_mechanism ( + mec_id BIGINT NOT NULL, + record_id BIGINT NOT NULL, + molregno BIGINT, + mechanism_of_action VARCHAR(250), + tid BIGINT, + site_id BIGINT, + action_type VARCHAR(50), + direct_interaction SMALLINT, + molecular_mechanism SMALLINT, + disease_efficacy SMALLINT, + mechanism_comment VARCHAR(2000), + selectivity_comment VARCHAR(1000), + binding_site_comment VARCHAR(1000), + variant_id BIGINT, + CONSTRAINT molecule_mechanism_pk PRIMARY KEY (mec_id), + CONSTRAINT fk_drugmec_actiontype FOREIGN KEY(action_type) REFERENCES action_type (action_type) ON DELETE CASCADE, + CONSTRAINT fk_drugmec_molregno FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE, + CONSTRAINT fk_drugmec_rec_id FOREIGN KEY(record_id) REFERENCES compound_records (record_id) ON DELETE CASCADE, + CONSTRAINT fk_drugmec_site_id FOREIGN KEY(site_id) REFERENCES binding_sites (site_id) ON DELETE CASCADE, + CONSTRAINT fk_drugmec_tid FOREIGN KEY(tid) REFERENCES target_dictionary (tid) ON DELETE CASCADE, + CONSTRAINT fk_drugmec_varid FOREIGN KEY(variant_id) REFERENCES variant_sequences (variant_id) ON DELETE CASCADE, + CONSTRAINT ck_drugmec_direct CHECK (direct_interaction in (0,1)), + CONSTRAINT ck_drugmec_efficacy CHECK (disease_efficacy in (0,1)), + CONSTRAINT ck_drugmec_molecular CHECK (molecular_mechanism in (0,1)) +); +CREATE TABLE drug_indication ( + drugind_id BIGINT NOT NULL, + record_id BIGINT NOT NULL, + molregno BIGINT, + max_phase_for_ind SMALLINT, + mesh_id VARCHAR(20) NOT NULL, + mesh_heading VARCHAR(200) NOT NULL, + efo_id VARCHAR(20), + efo_term VARCHAR(200), + 
CONSTRAINT drugind_pk PRIMARY KEY (drugind_id), + CONSTRAINT drugind_molregno_fk FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE, + CONSTRAINT drugind_rec_fk FOREIGN KEY(record_id) REFERENCES compound_records (record_id) ON DELETE CASCADE, + CONSTRAINT drugind_phase_ck CHECK (MAX_PHASE_FOR_IND=0 OR MAX_PHASE_FOR_IND=1 OR MAX_PHASE_FOR_IND=2 OR MAX_PHASE_FOR_IND=3 OR MAX_PHASE_FOR_IND=4), + CONSTRAINT drugind_uk UNIQUE (record_id, mesh_id, efo_id) +); +CREATE TABLE assay_parameters ( + assay_param_id BIGINT NOT NULL, + assay_id BIGINT NOT NULL, + type VARCHAR(250) NOT NULL, + relation VARCHAR(50), + value NUMERIC, + units VARCHAR(100), + text_value VARCHAR(4000), + standard_type VARCHAR(250), + standard_relation VARCHAR(50), + standard_value NUMERIC, + standard_units VARCHAR(100), + standard_text_value VARCHAR(4000), + comments VARCHAR(4000), + CONSTRAINT pk_assay_param PRIMARY KEY (assay_param_id), + CONSTRAINT fk_assay_param_assayid FOREIGN KEY(assay_id) REFERENCES assays (assay_id) ON DELETE CASCADE, + CONSTRAINT uk_assay_param UNIQUE (assay_id, type) +); +CREATE TABLE activities ( + activity_id BIGINT NOT NULL, + assay_id BIGINT NOT NULL, + doc_id BIGINT, + record_id BIGINT NOT NULL, + molregno BIGINT, + standard_relation VARCHAR(50), + standard_value NUMERIC, + standard_units VARCHAR(100), + standard_flag SMALLINT, + standard_type VARCHAR(250), + activity_comment VARCHAR(4000), + data_validity_comment VARCHAR(30), + potential_duplicate SMALLINT, + pchembl_value NUMERIC(4, 2), + bao_endpoint VARCHAR(11), + uo_units VARCHAR(10), + qudt_units VARCHAR(70), + toid INTEGER, + upper_value NUMERIC, + standard_upper_value NUMERIC, + src_id INTEGER, + type VARCHAR(250) NOT NULL, + relation VARCHAR(50), + value NUMERIC, + units VARCHAR(100), + text_value VARCHAR(1000), + standard_text_value VARCHAR(1000), + CONSTRAINT pk_act_activity_id PRIMARY KEY (activity_id), + CONSTRAINT fk_act_assay_id FOREIGN KEY(assay_id) REFERENCES assays 
(assay_id) ON DELETE CASCADE, + CONSTRAINT fk_act_bao_endpoint FOREIGN KEY(bao_endpoint) REFERENCES bioassay_ontology (bao_id) ON DELETE CASCADE, + CONSTRAINT fk_act_doc_id FOREIGN KEY(doc_id) REFERENCES docs (doc_id) ON DELETE CASCADE, + CONSTRAINT fk_act_molregno FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE, + CONSTRAINT fk_act_record_id FOREIGN KEY(record_id) REFERENCES compound_records (record_id) ON DELETE CASCADE, + CONSTRAINT fk_act_src_id FOREIGN KEY(src_id) REFERENCES source (src_id) ON DELETE CASCADE, + CONSTRAINT fk_data_val_comm FOREIGN KEY(data_validity_comment) REFERENCES data_validity_lookup (data_validity_comment) ON DELETE CASCADE, + CONSTRAINT ck_potential_dup CHECK (POTENTIAL_DUPLICATE IN (0,1)), + CONSTRAINT ck_stand_flag CHECK (standard_flag in (0,1)), + CONSTRAINT ck_stand_relation CHECK (standard_relation in ('>','<','=','~','<=','>=','<<','>>')) +); +CREATE TABLE drug_warning ( + warning_id BIGINT NOT NULL, + record_id BIGINT, + molregno BIGINT, + warning_type VARCHAR(20), + warning_class VARCHAR(100), + warning_description VARCHAR(4000), + warning_country VARCHAR(1000), + warning_year INTEGER, + CONSTRAINT sys_c00117264 PRIMARY KEY (warning_id), + CONSTRAINT fk_warning_record_id FOREIGN KEY(record_id) REFERENCES compound_records (record_id) ON DELETE CASCADE +); +CREATE TABLE metabolism ( + met_id BIGINT NOT NULL, + drug_record_id BIGINT, + substrate_record_id BIGINT, + metabolite_record_id BIGINT, + pathway_id BIGINT, + pathway_key VARCHAR(50), + enzyme_name VARCHAR(200), + enzyme_tid BIGINT, + met_conversion VARCHAR(200), + organism VARCHAR(100), + tax_id BIGINT, + met_comment VARCHAR(1000), + CONSTRAINT pk_rec_met_id PRIMARY KEY (met_id), + CONSTRAINT fk_recmet_drug_recid FOREIGN KEY(drug_record_id) REFERENCES compound_records (record_id) ON DELETE CASCADE, + CONSTRAINT fk_recmet_met_recid FOREIGN KEY(metabolite_record_id) REFERENCES compound_records (record_id) ON DELETE CASCADE, + CONSTRAINT 
fk_recmet_sub_recid FOREIGN KEY(substrate_record_id) REFERENCES compound_records (record_id) ON DELETE CASCADE, + CONSTRAINT fk_recmet_tid FOREIGN KEY(enzyme_tid) REFERENCES target_dictionary (tid) ON DELETE CASCADE, + CONSTRAINT uk_recmet UNIQUE (drug_record_id, substrate_record_id, metabolite_record_id, pathway_id, enzyme_name, enzyme_tid, tax_id) +); +CREATE TABLE biotherapeutic_components ( + biocomp_id BIGINT NOT NULL, + molregno BIGINT NOT NULL, + component_id BIGINT NOT NULL, + CONSTRAINT pk_biocomp_id PRIMARY KEY (biocomp_id), + CONSTRAINT fk_biocomp_compid FOREIGN KEY(component_id) REFERENCES bio_component_sequences (component_id) ON DELETE CASCADE, + CONSTRAINT fk_biocomp_molregno FOREIGN KEY(molregno) REFERENCES biotherapeutics (molregno) ON DELETE CASCADE, + CONSTRAINT uk_biocomp UNIQUE (molregno, component_id) +); +CREATE TABLE assay_class_map ( + ass_cls_map_id BIGINT NOT NULL, + assay_id BIGINT NOT NULL, + assay_class_id BIGINT NOT NULL, + CONSTRAINT pk_assay_cls_map PRIMARY KEY (ass_cls_map_id), + CONSTRAINT fk_ass_cls_map_assay FOREIGN KEY(assay_id) REFERENCES assays (assay_id) ON DELETE CASCADE, + CONSTRAINT fk_ass_cls_map_class FOREIGN KEY(assay_class_id) REFERENCES assay_classification (assay_class_id) ON DELETE CASCADE, + CONSTRAINT uk_ass_cls_map UNIQUE (assay_id, assay_class_id) +); +CREATE TABLE formulations ( + product_id VARCHAR(30) NOT NULL, + ingredient VARCHAR(200), + strength VARCHAR(300), + record_id BIGINT NOT NULL, + molregno BIGINT, + formulation_id BIGINT NOT NULL, + CONSTRAINT pk_formulations_id PRIMARY KEY (formulation_id), + CONSTRAINT fk_formulations_molregno FOREIGN KEY(molregno) REFERENCES molecule_dictionary (molregno) ON DELETE CASCADE, + CONSTRAINT fk_formulations_productid FOREIGN KEY(product_id) REFERENCES products (product_id) ON DELETE CASCADE, + CONSTRAINT fk_formulations_recid FOREIGN KEY(record_id) REFERENCES compound_records (record_id) ON DELETE CASCADE, + CONSTRAINT uk_formulations UNIQUE (product_id, record_id) 
+); +CREATE TABLE site_components ( + sitecomp_id BIGINT NOT NULL, + site_id BIGINT NOT NULL, + component_id BIGINT, + domain_id BIGINT, + site_residues VARCHAR(2000), + CONSTRAINT pk_sitecomp_id PRIMARY KEY (sitecomp_id), + CONSTRAINT fk_sitecomp_compid FOREIGN KEY(component_id) REFERENCES component_sequences (component_id) ON DELETE CASCADE, + CONSTRAINT fk_sitecomp_domainid FOREIGN KEY(domain_id) REFERENCES domains (domain_id) ON DELETE CASCADE, + CONSTRAINT fk_sitecomp_siteid FOREIGN KEY(site_id) REFERENCES binding_sites (site_id) ON DELETE CASCADE, + CONSTRAINT uk_sitecomp UNIQUE (site_id, component_id, domain_id) +); +CREATE TABLE indication_refs ( + indref_id BIGINT NOT NULL, + drugind_id BIGINT NOT NULL, + ref_type VARCHAR(50) NOT NULL, + ref_id VARCHAR(4000) NOT NULL, + ref_url VARCHAR(4000) NOT NULL, + CONSTRAINT indication_refs_pk PRIMARY KEY (indref_id), + CONSTRAINT indref_drugind_fk FOREIGN KEY(drugind_id) REFERENCES drug_indication (drugind_id) ON DELETE CASCADE, + CONSTRAINT indref_uk UNIQUE (drugind_id, ref_type, ref_id) +); +CREATE TABLE mechanism_refs ( + mecref_id BIGINT NOT NULL, + mec_id BIGINT NOT NULL, + ref_type VARCHAR(50) NOT NULL, + ref_id VARCHAR(200), + ref_url VARCHAR(400), + CONSTRAINT pk_mechanism_refs PRIMARY KEY (mecref_id), + CONSTRAINT fk_mechanism_refs_mecid FOREIGN KEY(mec_id) REFERENCES drug_mechanism (mec_id) ON DELETE CASCADE, + CONSTRAINT ck_mechanism_ref_type CHECK (ref_type in ('PMDA','ISBN','IUPHAR','DOI','EMA','PubMed','USPO','DailyMed','FDA','Expert','Other','InterPro','Wikipedia','UniProt','KEGG','PMC','ClinicalTrials','PubChem','Patent','BNF','HMA')), + CONSTRAINT uk_mechanism_refs UNIQUE (mec_id, ref_type, ref_id) +); +CREATE TABLE activity_properties ( + ap_id BIGINT NOT NULL, + activity_id BIGINT NOT NULL, + type VARCHAR(250) NOT NULL, + relation VARCHAR(50), + value NUMERIC, + units VARCHAR(100), + text_value VARCHAR(1000), + standard_type VARCHAR(250), + standard_relation VARCHAR(50), + standard_value NUMERIC, 
+ standard_units VARCHAR(100), + standard_text_value VARCHAR(1000), + comments VARCHAR(4000), + result_flag SMALLINT NOT NULL, + CONSTRAINT pk_actprop_ap_id PRIMARY KEY (ap_id), + CONSTRAINT fk_activity_property FOREIGN KEY(activity_id) REFERENCES activities (activity_id) ON DELETE CASCADE, + CONSTRAINT uk_actprop_id_type UNIQUE (activity_id, type) +); +CREATE TABLE warning_refs ( + warnref_id BIGINT NOT NULL, + warning_id BIGINT, + ref_type VARCHAR(50), + ref_id VARCHAR(4000), + ref_url VARCHAR(4000), + CONSTRAINT sys_c00117259 PRIMARY KEY (warnref_id), + CONSTRAINT fk_warnref_warn_id FOREIGN KEY(warning_id) REFERENCES drug_warning (warning_id) ON DELETE CASCADE +); +CREATE TABLE predicted_binding_domains ( + predbind_id BIGINT NOT NULL, + activity_id BIGINT, + site_id BIGINT, + prediction_method VARCHAR(50), + confidence VARCHAR(10), + CONSTRAINT pk_predbinddom_predbind_id PRIMARY KEY (predbind_id), + CONSTRAINT fk_predbinddom_act_id FOREIGN KEY(activity_id) REFERENCES activities (activity_id) ON DELETE CASCADE, + CONSTRAINT fk_predbinddom_site_id FOREIGN KEY(site_id) REFERENCES binding_sites (site_id) ON DELETE CASCADE, + CONSTRAINT ck_predbinddom_conf CHECK (confidence in ('high','medium','low')), + CONSTRAINT ck_predbinddom_method CHECK (prediction_method in ('Manual','Single domain','Multi domain')) +); +CREATE TABLE metabolism_refs ( + metref_id BIGINT NOT NULL, + met_id BIGINT NOT NULL, + ref_type VARCHAR(50) NOT NULL, + ref_id VARCHAR(200), + ref_url VARCHAR(400), + CONSTRAINT pk_metref_id PRIMARY KEY (metref_id), + CONSTRAINT fk_metref_met_id FOREIGN KEY(met_id) REFERENCES metabolism (met_id) ON DELETE CASCADE, + CONSTRAINT uk_metref UNIQUE (met_id, ref_type, ref_id) +); +CREATE TABLE activity_supp_map ( + actsm_id BIGINT NOT NULL, + activity_id BIGINT NOT NULL, + smid BIGINT NOT NULL, + CONSTRAINT pk_actsm_id PRIMARY KEY (actsm_id), + CONSTRAINT fk_act_smid FOREIGN KEY(smid) REFERENCES activity_smid (smid) ON DELETE CASCADE, + CONSTRAINT fk_supp_act 
FOREIGN KEY(activity_id) REFERENCES activities (activity_id) ON DELETE CASCADE +); +CREATE TABLE ligand_eff ( + activity_id BIGINT NOT NULL, + bei NUMERIC(9, 2), + sei NUMERIC(9, 2), + le NUMERIC(9, 2), + lle NUMERIC(9, 2), + CONSTRAINT pk_ligeff_actid PRIMARY KEY (activity_id), + CONSTRAINT fk_ligeff_actid FOREIGN KEY(activity_id) REFERENCES activities (activity_id) ON DELETE CASCADE, + CONSTRAINT ck_ligeff_bei CHECK (bei > 0), + CONSTRAINT ck_ligeff_sei CHECK (sei > 0) +); +CREATE INDEX idx_moldict_pref_name ON molecule_dictionary (pref_name); +CREATE INDEX idx_moldict_max_phase ON molecule_dictionary (max_phase); +CREATE INDEX idx_moldict_ther_flag ON molecule_dictionary (therapeutic_flag); +CREATE UNIQUE INDEX idx_moldict_chembl_id ON molecule_dictionary (chembl_id); +CREATE UNIQUE INDEX organism_class_pk ON organism_class (oc_id); +CREATE INDEX idx_assays_doc_id ON assays (doc_id); +CREATE INDEX idx_assays_desc ON assays (description); +CREATE INDEX idx_assays_src_id ON assays (src_id); +CREATE UNIQUE INDEX idx_assays_chembl_id ON assays (chembl_id); +CREATE INDEX tmp_bao_format ON assays (bao_format); +CREATE INDEX idx_assay_assay_id ON assays (assay_type); +CREATE INDEX idx_docs_pmid ON docs (pubmed_id); +CREATE INDEX bmx_doc_iss ON docs (issue); +CREATE UNIQUE INDEX pk_doc_doc_id ON docs (doc_id); +CREATE INDEX bmx_doc_vol ON docs (volume); +CREATE INDEX bmx_doc_jrnl ON docs (journal); +CREATE INDEX bmx_doc_year ON docs (year); +CREATE UNIQUE INDEX pk_rt_rt ON relationship_type (relationship_type); +CREATE INDEX idx_td_pname ON target_dictionary (pref_name); +CREATE INDEX idx_td_org ON target_dictionary (organism); +CREATE INDEX idx_td_taxid ON target_dictionary (tax_id); +CREATE INDEX idx_td_t_type ON target_dictionary (target_type); +CREATE INDEX idx_td_chembl_id ON target_dictionary (chembl_id); +CREATE UNIQUE INDEX pk_tt_tt ON target_type (target_type); +CREATE UNIQUE INDEX tissue_dictionary_pk ON tissue_dictionary (tissue_id); +CREATE UNIQUE INDEX 
pk_comp_rec_recid ON compound_records (record_id); +CREATE INDEX idx_comp_rec_srccpid ON compound_records (src_compound_id); +CREATE INDEX fk_comp_rec_docid ON compound_records (doc_id); +CREATE INDEX idx_comp_rec_ckey ON compound_records (compound_key); +CREATE INDEX idx_comp_rec_cidx ON compound_records (cidx); +CREATE INDEX idx_comp_rec_srcid ON compound_records (src_id); +CREATE INDEX fk_comp_rec_molregno ON compound_records (molregno); +CREATE INDEX idx_actsupp_type ON activity_supp (type); +CREATE INDEX idx_actsupp_std_type ON activity_supp (standard_type); +CREATE INDEX idx_actsupp_rel ON activity_supp (relation); +CREATE INDEX idx_actsupp_std_val ON activity_supp (standard_value); +CREATE INDEX idx_actsupp_val ON activity_supp (value); +CREATE INDEX idx_actsupp_std_text ON activity_supp (standard_text_value); +CREATE INDEX idx_actsupp_units ON activity_supp (units); +CREATE INDEX idx_actsupp_std_rel ON activity_supp (standard_relation); +CREATE INDEX idx_actsupp_std_units ON activity_supp (standard_units); +CREATE INDEX idx_actsupp_text ON activity_supp (text_value); +CREATE UNIQUE INDEX protclass_pk ON protein_classification (protein_class_id); +CREATE INDEX idx_cmpdstr_smiles ON compound_structures (canonical_smiles); +CREATE UNIQUE INDEX compound_structures_pk ON compound_structures (molregno); +CREATE INDEX idx_cmpdstr_stdinchi ON compound_structures (standard_inchi); +CREATE INDEX idx_cmpdstr_stdkey ON compound_structures (standard_inchi_key); +CREATE UNIQUE INDEX bio_component_seqs_pk ON bio_component_sequences (component_id); +CREATE UNIQUE INDEX drug_indication_pk ON drug_indication (drugind_id); +CREATE UNIQUE INDEX mechanism_refs_uk ON mechanism_refs (mec_id, ref_type, ref_id); +CREATE UNIQUE INDEX mechanism_refs_pk ON mechanism_refs (mecref_id); +CREATE INDEX idx_assay_param_text ON assay_parameters (text_value); +CREATE INDEX idx_assay_param_std_val ON assay_parameters (standard_value); +CREATE INDEX idx_assay_param_rel ON assay_parameters 
(relation); +CREATE INDEX idx_assay_param_std_units ON assay_parameters (standard_units); +CREATE INDEX idx_assay_param_std_rel ON assay_parameters (standard_relation); +CREATE INDEX idx_assay_param_std_type ON assay_parameters (standard_type); +CREATE INDEX idx_assay_param_std_text ON assay_parameters (standard_text_value); +CREATE INDEX idx_assay_param_type ON assay_parameters (type); +CREATE INDEX idx_assay_param_units ON assay_parameters (units); +CREATE INDEX idx_assay_param_val ON assay_parameters (value); +CREATE UNIQUE INDEX assay_classification_pk ON assay_classification (assay_class_id); +CREATE UNIQUE INDEX structural_alert_set_pk ON structural_alert_sets (alert_set_id); +CREATE INDEX idx_actprop_resflag ON activity_properties (result_flag); +CREATE INDEX idx_actprop_val ON activity_properties (standard_value); +CREATE INDEX idx_act_prop_text ON activity_properties (standard_text_value); +CREATE INDEX idx_actprop_type ON activity_properties (standard_type); +CREATE INDEX idx_actprop_units ON activity_properties (standard_units); +CREATE INDEX idx_actprop_relation ON activity_properties (standard_relation); +CREATE INDEX idx_act_text ON activities (text_value); +CREATE INDEX fk_act_doc_id ON activities (doc_id); +CREATE INDEX idx_act_std_text ON activities (standard_text_value); +CREATE INDEX idx_act_type ON activities (type); +CREATE INDEX idx_act_std_type ON activities (standard_type); +CREATE INDEX fk_act_record_id ON activities (record_id); +CREATE INDEX idx_act_src_id ON activities (src_id); +CREATE INDEX idx_act_std_unit ON activities (standard_units); +CREATE INDEX idx_act_units ON activities (units); +CREATE INDEX idx_act_rel ON activities (relation); +CREATE INDEX idx_act_val ON activities (value); +CREATE INDEX idx_act_upper ON activities (upper_value); +CREATE INDEX idx_acc_relation ON activities (standard_relation); +CREATE INDEX idx_act_std_upper ON activities (standard_upper_value); +CREATE INDEX idx_act_pchembl ON activities 
(pchembl_value); +CREATE INDEX fk_act_molregno ON activities (molregno); +CREATE INDEX idx_act_std_val ON activities (standard_value); +CREATE INDEX fk_act_assay_id ON activities (assay_id); +CREATE INDEX idx_molhier_parent ON molecule_hierarchy (parent_molregno); +CREATE UNIQUE INDEX pk_actsmid ON activity_supp_map (actsm_id); +CREATE INDEX idx_cp_alogp ON compound_properties (alogp); +CREATE INDEX idx_cp_ro5 ON compound_properties (num_ro5_violations); +CREATE UNIQUE INDEX pk_com_molreg ON compound_properties (molregno); +CREATE INDEX idx_cp_hbd ON compound_properties (hbd); +CREATE INDEX idx_cp_hba ON compound_properties (hba); +CREATE INDEX idx_cp_mw ON compound_properties (mw_freebase); +CREATE INDEX idx_cp_rtb ON compound_properties (rtb); +CREATE INDEX idx_cp_psa ON compound_properties (psa); diff --git a/data/dev/genbank/flatGbk_test.seq.gz b/data/dev/genbank/flatGbk_test.seq.gz new file mode 100644 index 0000000..a465e94 Binary files /dev/null and b/data/dev/genbank/flatGbk_test.seq.gz differ diff --git a/data/dev/rhea_mini.rdf.gz b/data/dev/rhea_mini.rdf.gz new file mode 100644 index 0000000..3ef3b88 Binary files /dev/null and b/data/dev/rhea_mini.rdf.gz differ diff --git a/data/dev/rhea_to_uniprot_sprot.tsv.gz b/data/dev/rhea_to_uniprot_sprot.tsv.gz new file mode 100644 index 0000000..35bd939 Binary files /dev/null and b/data/dev/rhea_to_uniprot_sprot.tsv.gz differ diff --git a/data/dev/rhea_to_uniprot_trembl.tsv.gz b/data/dev/rhea_to_uniprot_trembl.tsv.gz new file mode 100644 index 0000000..ad5f2df Binary files /dev/null and b/data/dev/rhea_to_uniprot_trembl.tsv.gz differ diff --git a/data/dev/uniprot_sprot_test.xml.gz b/data/dev/uniprot_sprot_test.xml.gz new file mode 100644 index 0000000..fc6c8f1 Binary files /dev/null and b/data/dev/uniprot_sprot_test.xml.gz differ diff --git a/data/prod/rhea.rdf.gz b/data/prod/rhea.rdf.gz new file mode 100644 index 0000000..f8b01d8 Binary files /dev/null and b/data/prod/rhea.rdf.gz differ diff --git 
a/db/cmd/clean.go b/db/cmd/clean.go new file mode 100644 index 0000000..a3e1490 --- /dev/null +++ b/db/cmd/clean.go @@ -0,0 +1,16 @@ +package cmd + +import ( + "os" + + "github.com/spf13/cobra" +) + +var cleanCmd = &cobra.Command{ + Use: "clean", + Short: "Clean up the project", + Long: "Clean up the project", + Run: func(cmd *cobra.Command, args []string) { + os.Remove("ark.db") + }, +} diff --git a/db/cmd/download.go b/db/cmd/download.go new file mode 100644 index 0000000..165bf74 --- /dev/null +++ b/db/cmd/download.go @@ -0,0 +1,44 @@ +package cmd + +import ( + "github.com/spf13/cobra" +) + +/****************************************************************************** +ark needs an easy and reproducible way to grab all of the data it uses. +Some people would try to download everything manually with wget or curl but +that's bonkers and I'm not maintaining that. Instead I've made a multi-threaded webscraper. + +To be honest most of you reading this will probably just want to use the build +command which will download AND insert all of the data into ark for you +without any intermediary files but I'm leaving this here for the time being until +the build command is implemented. + +Download will download and store the latest versions of the following: + + - Rhea RDF file + - Rhea to Uniprot Sprot mapping file + - Rhea to Uniprot Trembl mapping file + - CHEMBL sqlite file + - Uniprot Sprot XML file + - Uniprot Trembl XML file + - All of Genbank's files + +Given that these urls are likely not to change I doubt there will be many if +any stability issues from upstream data sources. + +TODO: Create flags so that each data source can be downloaded individually. +TODO: Create way to track, report, and resume progress of downloads. + +TTFN, +Tim +******************************************************************************/ + +var downloadCmd = &cobra.Command{ + Use: "download", + Short: "Download data for standard deploy build. 
Run at your own risk.", + Long: "Download literally downloads all the base data needed to build a standard ark deployment the amount of data is dummy high to casually test on your personal machine. Run at your own risk.", + Run: func(cmd *cobra.Command, args []string) { + // download.ark() + }, +} diff --git a/db/cmd/download_test.go b/db/cmd/download_test.go new file mode 100644 index 0000000..fe2e9a5 --- /dev/null +++ b/db/cmd/download_test.go @@ -0,0 +1,11 @@ +package cmd + +/****************************************************************************** +So Download as a command is a total pain to test locally and needs mocks or a +fancy big server deployment to be fully tested. That's in the works but for now I've +just broken down most of what's needed into helper functions that can be easily +unit tested. + +TTFN, +Tim +******************************************************************************/ diff --git a/db/cmd/local.go b/db/cmd/local.go new file mode 100644 index 0000000..5086595 --- /dev/null +++ b/db/cmd/local.go @@ -0,0 +1,104 @@ +package cmd + +// var localCmd = &cobra.Command{ +// Use: "local", +// Short: "Builds a small test database for local development.", +// Run: func(cmd *cobra.Command, args []string) { +// local() +// }, +// } + +// func local() { +// // TODO: Check to see if the database already exists. Tells the user to run clean if they want a new install. +// if _, err := os.Stat("ark.db"); !os.IsNotExist(err) { +// log.Fatal("Database already exists. 
Run 'ark clean' to remove it.") +// } + +// // Begin SQLite +// log.Println("Creating database...") +// db, err := sqlx.Open("sqlite3", "ark.db") +// if err != nil { +// log.Fatalf("Failed to open sqlite in ark.db: %s", err) +// } + +// // Execute our schema in memory +// _, err = db.Exec(models.CreateSchema()) +// if err != nil { +// log.Fatalf("Failed to execute schema: %s", err) +// } + +// // Insert Rhea +// log.Printf("Inserting rhea") +// rhea, err := rhea.Read("rhea/data/rhea_mini.rdf.gz") +// if err != nil { +// log.Fatalf("Could not read rhea: %s", err) +// } + +// err = models.RheaInsert(db, rhea) +// if err != nil { +// log.Fatalf("Could not insert rhea: %s", err) +// } + +// // Insert Uniprot +// log.Printf("Inserting uniprot sprot") +// var wg sync.WaitGroup +// uniprotSprot, errors, err := uniprot.Read("data/uniprot_sprot_mini.xml.gz") +// if err != nil { +// log.Fatalf("Failed to read uniprot on error: %s", err) +// } +// wg.Add(1) +// go models.UniprotInsert(db, "sprot", uniprotSprot, errors, &wg) +// wg.Wait() + +// for err := range errors { +// if err.Error() != "EOF" { +// log.Fatalf("Failed on error during uniprot parsing or insertion: %s", err) +// } +// } + +// // log.Printf("Inserting uniprot trembl") +// // var wg2 sync.WaitGroup +// // uniprotTrembl, errors, err := uniprot.Read("uniprot/uniprot_trembl.xml.gz") +// // if err != nil { +// // log.Fatalf("Failed to read uniprot on error: %s", err) +// // } +// // wg2.Add(1) +// // go models.UniprotInsert(db, "trembl", uniprotTrembl, errors, &wg2) +// // wg2.Wait() + +// // for err := range errors { +// // if err.Error() != "EOF" { +// // log.Fatalf("Failed on error during uniprot trembl parsing or insertion: %s", err) +// // } +// // } +// // TODO: Use in big build command. 
+ +// // Insert Genbank +// matches, err := filepath.Glob("genbank/*") +// if err != nil { +// log.Fatalf("Failed during opening glob: %s", err) +// } +// for _, match := range matches { +// log.Printf("Inserting genbank file %s", match) +// sequences := genbank.ReadFlatGz("data/flatGbk_test.seq.gz") +// err := models.GenbankInsert(db, sequences) +// if err != nil { +// log.Fatalf("Failed on error during genbank insertion: %s", err) +// } +// } + +// // Insert tsv +// log.Printf("Inserting rhea->uniprot sprot") +// err = models.RheaTsvInsert(db, "rhea/data/rhea2uniprot_sprot_minimized.tsv", false) +// if err != nil { +// log.Fatalf("Failed to insert RheaTsvInsert sprot on: %s", err) +// } + +// log.Printf("Inserting rhea->uniprot trembl") +// err = models.RheaTsvInsert(db, "data/rhea2uniprot_test.tsv.gz", true) +// if err != nil { +// log.Fatalf("Failed to insert RheaTsvInsert trembl on: %s", err) +// } + +// log.Printf("Finished ark") +// } diff --git a/db/cmd/root.go b/db/cmd/root.go new file mode 100644 index 0000000..92ed11e --- /dev/null +++ b/db/cmd/root.go @@ -0,0 +1,67 @@ +package cmd + +import ( + "fmt" + "os" + + _ "github.com/mattn/go-sqlite3" + "github.com/spf13/cobra" + "github.com/spf13/viper" +) + +var ( + // Used for flags. + cfgFile string + userLicense string +) + +var rootCmd = &cobra.Command{ + Use: "ark", + Short: "ark is dummy fast database for engineering organisms.", + Long: `ark is a dummy fast database for engineering organisms. 
Cross reference genbank, uniprot, chembl, and rhea and get you some JSON.`, +} + +func Execute() { + if err := rootCmd.Execute(); err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } +} + +func init() { + cobra.OnInitialize(initConfig) + + rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.cobra.yaml)") + rootCmd.PersistentFlags().StringP("author", "a", "Timothy Stiles, Keoni Gandall", "author name for copyright attribution") + rootCmd.PersistentFlags().StringVarP(&userLicense, "license", "l", "", "MIT") + rootCmd.PersistentFlags().Bool("viper", true, "use Viper for configuration") + // viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author")) + // viper.BindPFlag("useViper", rootCmd.PersistentFlags().Lookup("viper")) + viper.SetDefault("author", "Timothy Stiles, Keoni Gandall") + viper.SetDefault("license", "MIT") + // rootCmd.AddCommand(localCmd) + rootCmd.AddCommand(cleanCmd) + rootCmd.AddCommand(downloadCmd) +} + +func initConfig() { + if cfgFile != "" { + // Use config file from the flag. + viper.SetConfigFile(cfgFile) + } else { + // Find home directory. + home, err := os.UserHomeDir() + cobra.CheckErr(err) + + // Search config in home directory with name ".cobra" (without extension). 
+ viper.AddConfigPath(home) + viper.SetConfigType("yaml") + viper.SetConfigName(".cobra") + } + + viper.AutomaticEnv() + + if err := viper.ReadInConfig(); err == nil { + fmt.Println("Using config file:", viper.ConfigFileUsed()) + } +} diff --git a/go.mod b/go.mod index ad5f699..0a86783 100644 --- a/go.mod +++ b/go.mod @@ -3,54 +3,71 @@ module github.com/bebop/ark go 1.18 require ( - github.com/google/uuid v1.3.1 + github.com/PuerkitoBio/goquery v1.7.1 + github.com/friendsofgo/errors v0.9.2 + github.com/gin-gonic/gin v1.8.0 + github.com/huandu/go-sqlbuilder v1.13.0 github.com/jmoiron/sqlx v1.3.4 - github.com/lib/pq v1.2.1-0.20191011153232-f91d3411e481 github.com/mattn/go-sqlite3 v1.14.12 - github.com/spf13/viper v1.16.0 - gorm.io/driver/postgres v1.5.2 - gorm.io/gorm v1.25.4 + github.com/spf13/cobra v1.2.1 + github.com/spf13/viper v1.9.0 + github.com/volatiletech/null/v8 v8.1.2 + github.com/volatiletech/sqlboiler/v4 v4.11.0 + github.com/volatiletech/strmangle v0.0.4 + modernc.org/sqlite v1.17.3 ) require ( - github.com/bytedance/sonic v1.9.1 // indirect - github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect - github.com/fsnotify/fsnotify v1.6.0 // indirect - github.com/gabriel-vasile/mimetype v1.4.2 // indirect + github.com/andybalholm/cascadia v1.2.0 // indirect + github.com/ericlagergren/decimal v0.0.0-20181231230500-73749d4874d5 // indirect + github.com/fsnotify/fsnotify v1.5.1 // indirect github.com/gin-contrib/sse v0.1.0 // indirect - github.com/gin-gonic/gin v1.9.1 // indirect - github.com/go-playground/locales v0.14.1 // indirect - github.com/go-playground/universal-translator v0.18.1 // indirect - github.com/go-playground/validator/v10 v10.14.0 // indirect - github.com/goccy/go-json v0.10.2 // indirect + github.com/go-playground/locales v0.14.0 // indirect + github.com/go-playground/universal-translator v0.18.0 // indirect + github.com/go-playground/validator/v10 v10.10.0 // indirect + github.com/goccy/go-json v0.9.7 // indirect + 
github.com/gofrs/uuid v3.2.0+incompatible // indirect + github.com/google/uuid v1.3.0 // indirect github.com/hashicorp/hcl v1.0.0 // indirect - github.com/jackc/pgpassfile v1.0.0 // indirect - github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect - github.com/jackc/pgx/v5 v5.3.1 // indirect - github.com/jinzhu/inflection v1.0.0 // indirect - github.com/jinzhu/now v1.1.5 // indirect + github.com/huandu/xstrings v1.3.2 // indirect + github.com/inconshreveable/mousetrap v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect - github.com/klauspost/cpuid/v2 v2.2.4 // indirect - github.com/leodido/go-urn v1.2.4 // indirect - github.com/magiconair/properties v1.8.7 // indirect - github.com/mattn/go-isatty v0.0.19 // indirect - github.com/mitchellh/mapstructure v1.5.0 // indirect - github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect + github.com/leodido/go-urn v1.2.1 // indirect + github.com/lib/pq v1.2.1-0.20191011153232-f91d3411e481 // indirect + github.com/magiconair/properties v1.8.5 // indirect + github.com/mattn/go-isatty v0.0.14 // indirect + github.com/mitchellh/mapstructure v1.4.2 // indirect + github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 // indirect github.com/modern-go/reflect2 v1.0.2 // indirect - github.com/pelletier/go-toml/v2 v2.0.8 // indirect - github.com/spf13/afero v1.9.5 // indirect - github.com/spf13/cast v1.5.1 // indirect + github.com/pelletier/go-toml v1.9.4 // indirect + github.com/pelletier/go-toml/v2 v2.0.1 // indirect + github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 // indirect + github.com/spf13/afero v1.6.0 // indirect + github.com/spf13/cast v1.4.1 // indirect github.com/spf13/jwalterweatherman v1.1.0 // indirect github.com/spf13/pflag v1.0.5 // indirect - github.com/subosito/gotenv v1.4.2 // indirect - github.com/twitchyliquid64/golang-asm v0.15.1 // 
indirect - github.com/ugorji/go/codec v1.2.11 // indirect - golang.org/x/arch v0.3.0 // indirect - golang.org/x/crypto v0.9.0 // indirect - golang.org/x/net v0.10.0 // indirect - golang.org/x/sys v0.8.0 // indirect - golang.org/x/text v0.9.0 // indirect - google.golang.org/protobuf v1.30.0 // indirect - gopkg.in/ini.v1 v1.67.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect + github.com/subosito/gotenv v1.2.0 // indirect + github.com/ugorji/go/codec v1.2.7 // indirect + github.com/volatiletech/inflect v0.0.1 // indirect + github.com/volatiletech/randomize v0.0.1 // indirect + golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 // indirect + golang.org/x/mod v0.4.2 // indirect + golang.org/x/net v0.0.0-20210614182718-04defd469f4e // indirect + golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac // indirect + golang.org/x/text v0.3.6 // indirect + golang.org/x/tools v0.1.5 // indirect + golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + google.golang.org/protobuf v1.28.0 // indirect + gopkg.in/ini.v1 v1.63.2 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + lukechampine.com/uint128 v1.1.1 // indirect + modernc.org/cc/v3 v3.36.0 // indirect + modernc.org/ccgo/v3 v3.16.6 // indirect + modernc.org/libc v1.16.7 // indirect + modernc.org/mathutil v1.4.1 // indirect + modernc.org/memory v1.1.1 // indirect + modernc.org/opt v0.1.1 // indirect + modernc.org/strutil v1.1.1 // indirect + modernc.org/token v1.0.0 // indirect ) diff --git a/go.sum b/go.sum index f2d42ad..9c597d4 100644 --- a/go.sum +++ b/go.sum @@ -3,7 +3,6 @@ cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMT cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= cloud.google.com/go 
v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= @@ -16,7 +15,14 @@ cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOY cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= +cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= +cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= +cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= +cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -25,6 +31,8 @@ cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4g cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= 
+cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= +cloud.google.com/go/firestore v1.6.0/go.mod h1:afJwI0vaXwAG54kI7A//lP/lSPDkQORQuMkv56TxEPU= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -34,17 +42,29 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= -github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s= -github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U= +github.com/DATA-DOG/go-sqlmock v1.4.1 h1:ThlnYciV1iM/V0OSF/dtkqWb6xo5qITT1TJBG1MRDJM= +github.com/DATA-DOG/go-sqlmock v1.4.1/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= +github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/Masterminds/sprig/v3 v3.2.2/go.mod h1:UoaO7Yp8KlPnJIYWTFkMaqPUYKTfGFPhxNuwnnxkKlk= +github.com/OneOfOne/xxhash 
v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/PuerkitoBio/goquery v1.7.1 h1:oE+T06D+1T7LNrn91B4aERsRIeCLJ/oPSa6xB9FPnz4= +github.com/PuerkitoBio/goquery v1.7.1/go.mod h1:XY0pP4kfraEmmV1O7Uf6XyjoslwsneBbgeDjLYuN8xY= +github.com/andybalholm/cascadia v1.2.0 h1:vuRCkM5Ozh/BfmsaTm26kbjm0mIOM3yS5Ek/F5h18aE= +github.com/andybalholm/cascadia v1.2.0/go.mod h1:YCyR8vOZT9aZ1CHEd8ap0gMVm2aFgxBp0T0eFw1RUQY= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/apmckinlay/gsuneido v0.0.0-20180907175622-1f10244968e3/go.mod h1:hJnaqxrCRgMCTWtpNz9XUFkBCREiQdlcyK6YNmOfroM= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= -github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams= -github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod 
h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -52,37 +72,60 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/denisenkom/go-mssqldb v0.10.0/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= +github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= +github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod 
h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= -github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= -github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= -github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= -github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= +github.com/ericlagergren/decimal v0.0.0-20181231230500-73749d4874d5 h1:HQGCJNlqt1dUs/BhtEKmqWd6LWS+DWYVxi9+Jo4r0jE= +github.com/ericlagergren/decimal v0.0.0-20181231230500-73749d4874d5/go.mod h1:1yj25TwtUlJ+pfOu9apAVaM1RWfZGg+aFpd4hPQZekQ= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= +github.com/friendsofgo/errors v0.9.2 h1:X6NYxef4efCBdwI7BgS820zFaN7Cphrmb+Pljdzjtgk= +github.com/friendsofgo/errors v0.9.2/go.mod h1:yCvFW5AkDIL9qn7suHVLiI/gH228n7PC4Pn44IGoTOI= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI= +github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= +github.com/ghodss/yaml v1.0.0/go.mod 
h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= -github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= -github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= +github.com/gin-gonic/gin v1.8.0 h1:4WFH5yycBMA3za5Hnl425yd9ymdw1XPm4666oab+hv4= +github.com/gin-gonic/gin v1.8.0/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= -github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= -github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= -github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg/+t63MyGU2n5js= -github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= +github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU= +github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= +github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= 
+github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= +github.com/go-playground/validator/v10 v10.10.0 h1:I7mrTYv78z8k8VXa/qJlOlEXn/nBh+BF8dHX5nt/dr0= +github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos= github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= -github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= -github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/goccy/go-json v0.9.7 h1:IcB+Aqpx/iMHu5Yooh7jEzJk1JZ7Pjtmys2ukPr7EeM= +github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gofrs/uuid v3.2.0+incompatible h1:y12jRkkFxsd7GpqdSZ+/KCs/fJbqpEXSGd4+jfEaewE= +github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -94,6 +137,8 @@ github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.4/go.mod 
h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -109,6 +154,9 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -119,13 +167,16 @@ github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod 
h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -135,126 +186,231 @@ github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod 
h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.3.1 h1:KjJaJ9iWZ3jOFZIf1Lqf4laDRCasjl0BCmnEGxkdLb4= -github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= +github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= +github.com/hashicorp/consul/api v1.10.1/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= +github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= 
+github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/memberlist 
v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= +github.com/huandu/go-assert v1.1.5 h1:fjemmA7sSfYHJD7CUqs9qTwwfdNAx7/j2/ZlHXzNB3c= +github.com/huandu/go-assert v1.1.5/go.mod h1:yOLvuqZwmcHIC5rIzrBhT7D3Q9c3GFnd0JrPVhn/06U= +github.com/huandu/go-sqlbuilder v1.13.0 h1:IN1VRzcyQ+Kx74L0g5ZAY5qDaRJjwMWVmb6GrFAF8Jc= +github.com/huandu/go-sqlbuilder v1.13.0/go.mod h1:LILlbQo0MOYjlIiGgOSR3UcWQpd5Y/oZ7HLNGyAUz0E= +github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw= +github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= -github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= -github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= -github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= -github.com/jackc/pgx/v5 v5.3.1 h1:Fcr8QJ1ZeLi5zsPZqQeUZhNhxfkkKBOgJuYkJHoBOtU= -github.com/jackc/pgx/v5 v5.3.1/go.mod h1:t3JDKnCBlYIc0ewLF0Q7B8MXmoIaBOZj/ic7iHozM/8= -github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= -github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= -github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= -github.com/jinzhu/now v1.1.5/go.mod 
h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/jmoiron/sqlx v1.3.4 h1:wv+0IJZfL5z0uZoUjlpKgHkgaFSYD+r9CfrXjEXsO7w= github.com/jmoiron/sqlx v1.3.4/go.mod h1:2BljVx/86SuTyjE+aPYlHCTNvZrnJXghYGpNiXLBMCQ= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= -github.com/klauspost/cpuid/v2 v2.2.4 h1:acbojRNwl3o09bUq+yDCtZFc1aiwaAAxtcn8YkZXnvk= -github.com/klauspost/cpuid/v2 v2.2.4/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.3.1 
h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= -github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= +github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.2.1-0.20191011153232-f91d3411e481 h1:r9fnMM01mkhtfe6QfLrr/90mBVLnJHge2jGeBvApOjk= github.com/lib/pq v1.2.1-0.20191011153232-f91d3411e481/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= -github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= +github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mattn/go-colorable v0.0.9/go.mod 
h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/mattn/go-sqlite3 v1.14.10/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/mattn/go-sqlite3 v1.14.12 h1:TJ1bhYJPV44phC+IMu1u2K/i5RriLTPe+yc68XDJ1Z0= github.com/mattn/go-sqlite3 v1.14.12/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= -github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= -github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= +github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-homedir v1.1.0/go.mod 
h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.2 h1:6h7AQ0yhTcIsmFmnAwQls75jp2Gzs4iB8W7pjMO+rqo= +github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ= -github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4= 
-github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM= +github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.0.1 h1:8e3L2cCQzLFi2CR4g7vGFuFxX7Jl1kKX8gW+iV0GUKU= +github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 h1:OdAsTTz6OkFY5QxjkYwrChwuRruF69c169dPK26NUlk= +github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod 
h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= -github.com/spf13/afero v1.9.5 h1:stMpOSZFs//0Lv29HduCmli3GUfpFoF3Y1Q/aXj/wVM= -github.com/spf13/afero v1.9.5/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= -github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA= -github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48= +github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= +github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/sagikazarmark/crypt v0.1.0/go.mod h1:B/mN0msZuINBtQ1zZLEQcegFJJf9vnYIR88KRMEuODE= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= +github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= 
+github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.4.1 h1:s0hze+J0196ZfEMTs80N7UlFt0BDuQ7Q+JDnHiMWKdA= +github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cobra v1.2.1 h1:+KmjbUw1hriSNMF55oPrkZcb27aECyrj8V2ytv7kWDw= +github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.16.0 h1:rGGH0XDZhdUOryiDWjmIvUSWpbNqisK8Wk0Vyefw8hc= -github.com/spf13/viper v1.16.0/go.mod h1:yg78JgCJcbrQOvV9YLXgkLaZqUidkY9K+Dd1FofRzQg= +github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= +github.com/spf13/viper v1.9.0 h1:yR6EXjTp0y0cLN8OZg1CRZmOBdI88UcGkhgyJhu6nZk= +github.com/spf13/viper v1.9.0/go.mod h1:+i6ajR7OX2XaiBkrcZJFK21htRk7eDeLg7+O6bhUPP4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= 
+github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY= -github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= -github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8= -github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= -github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= -github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= -github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= -github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M= +github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0= +github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY= +github.com/volatiletech/inflect v0.0.1 h1:2a6FcMQyhmPZcLa+uet3VJ8gLn/9svWhJxJYwvE8KsU= +github.com/volatiletech/inflect v0.0.1/go.mod h1:IBti31tG6phkHitLlr5j7shC5SOo//x0AjDzaJU1PLA= +github.com/volatiletech/null/v8 v8.1.2 h1:kiTiX1PpwvuugKwfvUNX/SU/5A2KGZMXfGD0DUHdKEI= +github.com/volatiletech/null/v8 v8.1.2/go.mod h1:98DbwNoKEpRrYtGjWFctievIfm4n4MxG0A6EBUcoS5g= +github.com/volatiletech/randomize 
v0.0.1 h1:eE5yajattWqTB2/eN8df4dw+8jwAzBtbdo5sbWC4nMk= +github.com/volatiletech/randomize v0.0.1/go.mod h1:GN3U0QYqfZ9FOJ67bzax1cqZ5q2xuj2mXrXBjWaRTlY= +github.com/volatiletech/sqlboiler/v4 v4.11.0 h1:jItTUGIXfCfFiNEGIBZZj4rFMO/gXhjqX03sJ5LiDk8= +github.com/volatiletech/sqlboiler/v4 v4.11.0/go.mod h1:AAaQj77uX6nyU+Q5q6OcVCFFEs/gs+qsthM18/NVemo= +github.com/volatiletech/strmangle v0.0.1/go.mod h1:F6RA6IkB5vq0yTG4GQ0UsbbRcl3ni9P76i+JrTBKFFg= +github.com/volatiletech/strmangle v0.0.4 h1:CxrEPhobZL/PCZOTDSH1aq7s4Kv76hQpRoTVVlUOim4= +github.com/volatiletech/strmangle v0.0.4/go.mod h1:ycDvbDkjDvhC0NUU8w3fWwl5JEMTV56vTKXzR3GeR+0= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= -golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= -golang.org/x/arch v0.3.0 
h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k= -golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g= -golang.org/x/crypto v0.9.0/go.mod 
h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 h1:HWj/xjIHfjYU5nVXpTM0s39J9CbLn7Cc5a7IC5rwsMQ= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -278,6 +434,7 @@ golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRu golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= @@ -288,8 +445,13 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= +golang.org/x/mod v0.4.2/go.mod 
h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -300,6 +462,7 @@ golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -316,12 +479,15 @@ golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81R golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net 
v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M= -golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210614182718-04defd469f4e h1:XpT3nA5TvE525Ne3hInMh6+GETgn27Zfm9dxsThnX2Q= +golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -331,6 +497,13 @@ golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 
v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -341,8 +514,12 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys 
v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -350,11 +527,18 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -369,18 +553,28 @@ golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201126233918-771906719818/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU= -golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210902050250-f475640dd07b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac h1:oN6lz7iLW/YC7un8pq+9bOLyXrprv2+DKfkJY+2LJJw= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -388,10 +582,9 @@ golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3 golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= -golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -401,6 +594,7 @@ golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3 golang.org/x/tools 
v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= @@ -408,8 +602,10 @@ golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -432,19 +628,28 @@ golang.org/x/tools 
v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201124115921-2c860bdd6e78/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.3/go.mod 
h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5 h1:ouewzE6p+/VEB31YYnTbEJdi8pFqKp4P4n85vwo3DHA= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= @@ -465,6 +670,15 @@ google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz513 google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= +google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= +google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= +google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= +google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= +google.golang.org/api 
v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= +google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -495,6 +709,7 @@ google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= @@ -506,8 +721,23 @@ google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod 
h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= +google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= +google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto 
v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -521,9 +751,19 @@ google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3Iji google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= 
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -535,22 +775,29 @@ google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpAD google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= -google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= -gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= 
+gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.63.2 h1:tGK/CyBg7SMzb60vP1M03vNZ3VDu3wGQJwn7Sxi9r3c= +gopkg.in/ini.v1 v1.63.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gorm.io/driver/postgres v1.5.2 h1:ytTDxxEv+MplXOfFe3Lzm7SjG09fcdb3Z/c056DTBx0= -gorm.io/driver/postgres v1.5.2/go.mod h1:fmpX0m2I1PKuR7mKZiEluwrP3hbs+ps7JIGMUBpCgl8= -gorm.io/gorm v1.25.4 h1:iyNd8fNAe8W9dvtlgeRI5zSVZPsq3OpcTu37cYcpCmw= -gorm.io/gorm v1.25.4/go.mod h1:L4uxeKpfBml98NYqVqwAdmV1a2nBtAec/cf3fpucW/k= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -558,7 +805,147 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod 
h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +lukechampine.com/uint128 v1.1.1 h1:pnxCASz787iMf+02ssImqk6OLt+Z5QHMoZyUXR4z6JU= +lukechampine.com/uint128 v1.1.1/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= +modernc.org/cc/v3 v3.33.6/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.33.9/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.33.11/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.34.0/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.35.0/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.35.4/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.35.5/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.35.7/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.35.8/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.35.10/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.35.15/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.35.16/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.35.17/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.35.18/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.35.20/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.35.22/go.mod h1:iPJg1pkwXqAV16SNgFBVYmggfMg6xhs+2oiO0vclK3g= +modernc.org/cc/v3 v3.36.0 h1:0kmRkTmqNidmu3c7BNDSdVHCxXCkWLmWmCIVX4LUboo= +modernc.org/cc/v3 v3.36.0/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= +modernc.org/ccgo/v3 v3.0.0-20220428102840-41399a37e894/go.mod h1:eI31LL8EwEBKPpNpA4bU1/i+sKOwOrQy8D87zWUcRZc= 
+modernc.org/ccgo/v3 v3.0.0-20220430103911-bc99d88307be/go.mod h1:bwdAnOoaIt8Ax9YdWGjxWsdkPcZyRPHqrOvJxaKAKGw= +modernc.org/ccgo/v3 v3.9.5/go.mod h1:umuo2EP2oDSBnD3ckjaVUXMrmeAw8C8OSICVa0iFf60= +modernc.org/ccgo/v3 v3.10.0/go.mod h1:c0yBmkRFi7uW4J7fwx/JiijwOjeAeR2NoSaRVFPmjMw= +modernc.org/ccgo/v3 v3.11.0/go.mod h1:dGNposbDp9TOZ/1KBxghxtUp/bzErD0/0QW4hhSaBMI= +modernc.org/ccgo/v3 v3.11.1/go.mod h1:lWHxfsn13L3f7hgGsGlU28D9eUOf6y3ZYHKoPaKU0ag= +modernc.org/ccgo/v3 v3.11.3/go.mod h1:0oHunRBMBiXOKdaglfMlRPBALQqsfrCKXgw9okQ3GEw= +modernc.org/ccgo/v3 v3.12.4/go.mod h1:Bk+m6m2tsooJchP/Yk5ji56cClmN6R1cqc9o/YtbgBQ= +modernc.org/ccgo/v3 v3.12.6/go.mod h1:0Ji3ruvpFPpz+yu+1m0wk68pdr/LENABhTrDkMDWH6c= +modernc.org/ccgo/v3 v3.12.8/go.mod h1:Hq9keM4ZfjCDuDXxaHptpv9N24JhgBZmUG5q60iLgUo= +modernc.org/ccgo/v3 v3.12.11/go.mod h1:0jVcmyDwDKDGWbcrzQ+xwJjbhZruHtouiBEvDfoIsdg= +modernc.org/ccgo/v3 v3.12.14/go.mod h1:GhTu1k0YCpJSuWwtRAEHAol5W7g1/RRfS4/9hc9vF5I= +modernc.org/ccgo/v3 v3.12.18/go.mod h1:jvg/xVdWWmZACSgOiAhpWpwHWylbJaSzayCqNOJKIhs= +modernc.org/ccgo/v3 v3.12.20/go.mod h1:aKEdssiu7gVgSy/jjMastnv/q6wWGRbszbheXgWRHc8= +modernc.org/ccgo/v3 v3.12.21/go.mod h1:ydgg2tEprnyMn159ZO/N4pLBqpL7NOkJ88GT5zNU2dE= +modernc.org/ccgo/v3 v3.12.22/go.mod h1:nyDVFMmMWhMsgQw+5JH6B6o4MnZ+UQNw1pp52XYFPRk= +modernc.org/ccgo/v3 v3.12.25/go.mod h1:UaLyWI26TwyIT4+ZFNjkyTbsPsY3plAEB6E7L/vZV3w= +modernc.org/ccgo/v3 v3.12.29/go.mod h1:FXVjG7YLf9FetsS2OOYcwNhcdOLGt8S9bQ48+OP75cE= +modernc.org/ccgo/v3 v3.12.36/go.mod h1:uP3/Fiezp/Ga8onfvMLpREq+KUjUmYMxXPO8tETHtA8= +modernc.org/ccgo/v3 v3.12.38/go.mod h1:93O0G7baRST1vNj4wnZ49b1kLxt0xCW5Hsa2qRaZPqc= +modernc.org/ccgo/v3 v3.12.43/go.mod h1:k+DqGXd3o7W+inNujK15S5ZYuPoWYLpF5PYougCmthU= +modernc.org/ccgo/v3 v3.12.46/go.mod h1:UZe6EvMSqOxaJ4sznY7b23/k13R8XNlyWsO5bAmSgOE= +modernc.org/ccgo/v3 v3.12.47/go.mod h1:m8d6p0zNps187fhBwzY/ii6gxfjob1VxWb919Nk1HUk= +modernc.org/ccgo/v3 v3.12.50/go.mod h1:bu9YIwtg+HXQxBhsRDE+cJjQRuINuT9PUK4orOco/JI= +modernc.org/ccgo/v3 
v3.12.51/go.mod h1:gaIIlx4YpmGO2bLye04/yeblmvWEmE4BBBls4aJXFiE= +modernc.org/ccgo/v3 v3.12.53/go.mod h1:8xWGGTFkdFEWBEsUmi+DBjwu/WLy3SSOrqEmKUjMeEg= +modernc.org/ccgo/v3 v3.12.54/go.mod h1:yANKFTm9llTFVX1FqNKHE0aMcQb1fuPJx6p8AcUx+74= +modernc.org/ccgo/v3 v3.12.55/go.mod h1:rsXiIyJi9psOwiBkplOaHye5L4MOOaCjHg1Fxkj7IeU= +modernc.org/ccgo/v3 v3.12.56/go.mod h1:ljeFks3faDseCkr60JMpeDb2GSO3TKAmrzm7q9YOcMU= +modernc.org/ccgo/v3 v3.12.57/go.mod h1:hNSF4DNVgBl8wYHpMvPqQWDQx8luqxDnNGCMM4NFNMc= +modernc.org/ccgo/v3 v3.12.60/go.mod h1:k/Nn0zdO1xHVWjPYVshDeWKqbRWIfif5dtsIOCUVMqM= +modernc.org/ccgo/v3 v3.12.66/go.mod h1:jUuxlCFZTUZLMV08s7B1ekHX5+LIAurKTTaugUr/EhQ= +modernc.org/ccgo/v3 v3.12.67/go.mod h1:Bll3KwKvGROizP2Xj17GEGOTrlvB1XcVaBrC90ORO84= +modernc.org/ccgo/v3 v3.12.73/go.mod h1:hngkB+nUUqzOf3iqsM48Gf1FZhY599qzVg1iX+BT3cQ= +modernc.org/ccgo/v3 v3.12.81/go.mod h1:p2A1duHoBBg1mFtYvnhAnQyI6vL0uw5PGYLSIgF6rYY= +modernc.org/ccgo/v3 v3.12.84/go.mod h1:ApbflUfa5BKadjHynCficldU1ghjen84tuM5jRynB7w= +modernc.org/ccgo/v3 v3.12.86/go.mod h1:dN7S26DLTgVSni1PVA3KxxHTcykyDurf3OgUzNqTSrU= +modernc.org/ccgo/v3 v3.12.90/go.mod h1:obhSc3CdivCRpYZmrvO88TXlW0NvoSVvdh/ccRjJYko= +modernc.org/ccgo/v3 v3.12.92/go.mod h1:5yDdN7ti9KWPi5bRVWPl8UNhpEAtCjuEE7ayQnzzqHA= +modernc.org/ccgo/v3 v3.13.1/go.mod h1:aBYVOUfIlcSnrsRVU8VRS35y2DIfpgkmVkYZ0tpIXi4= +modernc.org/ccgo/v3 v3.14.0/go.mod h1:hBrkiBlUwvr5vV/ZH9YzXIp982jKE8Ek8tR1ytoAL6Q= +modernc.org/ccgo/v3 v3.15.1/go.mod h1:md59wBwDT2LznX/OTCPoVS6KIsdRgY8xqQwBV+hkTH0= +modernc.org/ccgo/v3 v3.16.4/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ= +modernc.org/ccgo/v3 v3.16.6 h1:3l18poV+iUemQ98O3X5OMr97LOqlzis+ytivU4NqGhA= +modernc.org/ccgo/v3 v3.16.6/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ= +modernc.org/ccorpus v1.11.1/go.mod h1:2gEUTrWqdpH2pXsmTM1ZkjeSrUWDpjMu2T6m29L/ErQ= +modernc.org/ccorpus v1.11.6 h1:J16RXiiqiCgua6+ZvQot4yUuUy8zxgqbqEEUuGPlISk= +modernc.org/ccorpus v1.11.6/go.mod h1:2gEUTrWqdpH2pXsmTM1ZkjeSrUWDpjMu2T6m29L/ErQ= 
+modernc.org/httpfs v1.0.6 h1:AAgIpFZRXuYnkjftxTAZwMIiwEqAfk8aVB2/oA6nAeM= +modernc.org/httpfs v1.0.6/go.mod h1:7dosgurJGp0sPaRanU53W4xZYKh14wfzX420oZADeHM= +modernc.org/libc v0.0.0-20220428101251-2d5f3daf273b/go.mod h1:p7Mg4+koNjc8jkqwcoFBJx7tXkpj00G77X7A72jXPXA= +modernc.org/libc v1.9.8/go.mod h1:U1eq8YWr/Kc1RWCMFUWEdkTg8OTcfLw2kY8EDwl039w= +modernc.org/libc v1.9.11/go.mod h1:NyF3tsA5ArIjJ83XB0JlqhjTabTCHm9aX4XMPHyQn0Q= +modernc.org/libc v1.11.0/go.mod h1:2lOfPmj7cz+g1MrPNmX65QCzVxgNq2C5o0jdLY2gAYg= +modernc.org/libc v1.11.2/go.mod h1:ioIyrl3ETkugDO3SGZ+6EOKvlP3zSOycUETe4XM4n8M= +modernc.org/libc v1.11.5/go.mod h1:k3HDCP95A6U111Q5TmG3nAyUcp3kR5YFZTeDS9v8vSU= +modernc.org/libc v1.11.6/go.mod h1:ddqmzR6p5i4jIGK1d/EiSw97LBcE3dK24QEwCFvgNgE= +modernc.org/libc v1.11.11/go.mod h1:lXEp9QOOk4qAYOtL3BmMve99S5Owz7Qyowzvg6LiZso= +modernc.org/libc v1.11.13/go.mod h1:ZYawJWlXIzXy2Pzghaf7YfM8OKacP3eZQI81PDLFdY8= +modernc.org/libc v1.11.16/go.mod h1:+DJquzYi+DMRUtWI1YNxrlQO6TcA5+dRRiq8HWBWRC8= +modernc.org/libc v1.11.19/go.mod h1:e0dgEame6mkydy19KKaVPBeEnyJB4LGNb0bBH1EtQ3I= +modernc.org/libc v1.11.24/go.mod h1:FOSzE0UwookyT1TtCJrRkvsOrX2k38HoInhw+cSCUGk= +modernc.org/libc v1.11.26/go.mod h1:SFjnYi9OSd2W7f4ct622o/PAYqk7KHv6GS8NZULIjKY= +modernc.org/libc v1.11.27/go.mod h1:zmWm6kcFXt/jpzeCgfvUNswM0qke8qVwxqZrnddlDiE= +modernc.org/libc v1.11.28/go.mod h1:Ii4V0fTFcbq3qrv3CNn+OGHAvzqMBvC7dBNyC4vHZlg= +modernc.org/libc v1.11.31/go.mod h1:FpBncUkEAtopRNJj8aRo29qUiyx5AvAlAxzlx9GNaVM= +modernc.org/libc v1.11.34/go.mod h1:+Tzc4hnb1iaX/SKAutJmfzES6awxfU1BPvrrJO0pYLg= +modernc.org/libc v1.11.37/go.mod h1:dCQebOwoO1046yTrfUE5nX1f3YpGZQKNcITUYWlrAWo= +modernc.org/libc v1.11.39/go.mod h1:mV8lJMo2S5A31uD0k1cMu7vrJbSA3J3waQJxpV4iqx8= +modernc.org/libc v1.11.42/go.mod h1:yzrLDU+sSjLE+D4bIhS7q1L5UwXDOw99PLSX0BlZvSQ= +modernc.org/libc v1.11.44/go.mod h1:KFq33jsma7F5WXiYelU8quMJasCCTnHK0mkri4yPHgA= +modernc.org/libc v1.11.45/go.mod h1:Y192orvfVQQYFzCNsn+Xt0Hxt4DiO4USpLNXBlXg/tM= +modernc.org/libc 
v1.11.47/go.mod h1:tPkE4PzCTW27E6AIKIR5IwHAQKCAtudEIeAV1/SiyBg= +modernc.org/libc v1.11.49/go.mod h1:9JrJuK5WTtoTWIFQ7QjX2Mb/bagYdZdscI3xrvHbXjE= +modernc.org/libc v1.11.51/go.mod h1:R9I8u9TS+meaWLdbfQhq2kFknTW0O3aw3kEMqDDxMaM= +modernc.org/libc v1.11.53/go.mod h1:5ip5vWYPAoMulkQ5XlSJTy12Sz5U6blOQiYasilVPsU= +modernc.org/libc v1.11.54/go.mod h1:S/FVnskbzVUrjfBqlGFIPA5m7UwB3n9fojHhCNfSsnw= +modernc.org/libc v1.11.55/go.mod h1:j2A5YBRm6HjNkoSs/fzZrSxCuwWqcMYTDPLNx0URn3M= +modernc.org/libc v1.11.56/go.mod h1:pakHkg5JdMLt2OgRadpPOTnyRXm/uzu+Yyg/LSLdi18= +modernc.org/libc v1.11.58/go.mod h1:ns94Rxv0OWyoQrDqMFfWwka2BcaF6/61CqJRK9LP7S8= +modernc.org/libc v1.11.71/go.mod h1:DUOmMYe+IvKi9n6Mycyx3DbjfzSKrdr/0Vgt3j7P5gw= +modernc.org/libc v1.11.75/go.mod h1:dGRVugT6edz361wmD9gk6ax1AbDSe0x5vji0dGJiPT0= +modernc.org/libc v1.11.82/go.mod h1:NF+Ek1BOl2jeC7lw3a7Jj5PWyHPwWD4aq3wVKxqV1fI= +modernc.org/libc v1.11.86/go.mod h1:ePuYgoQLmvxdNT06RpGnaDKJmDNEkV7ZPKI2jnsvZoE= +modernc.org/libc v1.11.87/go.mod h1:Qvd5iXTeLhI5PS0XSyqMY99282y+3euapQFxM7jYnpY= +modernc.org/libc v1.11.88/go.mod h1:h3oIVe8dxmTcchcFuCcJ4nAWaoiwzKCdv82MM0oiIdQ= +modernc.org/libc v1.11.98/go.mod h1:ynK5sbjsU77AP+nn61+k+wxUGRx9rOFcIqWYYMaDZ4c= +modernc.org/libc v1.11.101/go.mod h1:wLLYgEiY2D17NbBOEp+mIJJJBGSiy7fLL4ZrGGZ+8jI= +modernc.org/libc v1.12.0/go.mod h1:2MH3DaF/gCU8i/UBiVE1VFRos4o523M7zipmwH8SIgQ= +modernc.org/libc v1.13.1/go.mod h1:npFeGWjmZTjFeWALQLrvklVmAxv4m80jnG3+xI8FdJk= +modernc.org/libc v1.13.2/go.mod h1:npFeGWjmZTjFeWALQLrvklVmAxv4m80jnG3+xI8FdJk= +modernc.org/libc v1.14.1/go.mod h1:npFeGWjmZTjFeWALQLrvklVmAxv4m80jnG3+xI8FdJk= +modernc.org/libc v1.16.0/go.mod h1:N4LD6DBE9cf+Dzf9buBlzVJndKr/iJHG97vGLHYnb5A= +modernc.org/libc v1.16.1/go.mod h1:JjJE0eu4yeK7tab2n4S1w8tlWd9MxXLRzheaRnAKymU= +modernc.org/libc v1.16.7 h1:qzQtHhsZNpVPpeCu+aMIQldXeV1P0vRhSqCL0nOIJOA= +modernc.org/libc v1.16.7/go.mod h1:hYIV5VZczAmGZAnG15Vdngn5HSF5cSkbvfz2B7GRuVU= +modernc.org/mathutil v1.1.1/go.mod 
h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= +modernc.org/mathutil v1.2.2/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= +modernc.org/mathutil v1.4.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= +modernc.org/mathutil v1.4.1 h1:ij3fYGe8zBF4Vu+g0oT7mB06r8sqGWKuJu1yXeR4by8= +modernc.org/mathutil v1.4.1/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= +modernc.org/memory v1.0.4/go.mod h1:nV2OApxradM3/OVbs2/0OsP6nPfakXpi50C7dcoHXlc= +modernc.org/memory v1.0.5/go.mod h1:B7OYswTRnfGg+4tDH1t1OeUNnsy2viGTdME4tzd+IjM= +modernc.org/memory v1.1.1 h1:bDOL0DIDLQv7bWhP3gMvIrnoFw+Eo6F7a2QK9HPDiFU= +modernc.org/memory v1.1.1/go.mod h1:/0wo5ibyrQiaoUoH7f9D8dnglAmILJ5/cxZlRECf+Nw= +modernc.org/opt v0.1.1 h1:/0RX92k9vwVeDXj+Xn23DKp2VJubL7k8qNffND6qn3A= +modernc.org/opt v0.1.1/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= +modernc.org/sqlite v1.14.5/go.mod h1:YyX5Rx0WbXokitdWl2GJIDy4BrPxBP0PwwhpXOHCDLE= +modernc.org/sqlite v1.17.3 h1:iE+coC5g17LtByDYDWKpR6m2Z9022YrSh3bumwOnIrI= +modernc.org/sqlite v1.17.3/go.mod h1:10hPVYar9C0kfXuTWGz8s0XtB8uAGymUy51ZzStYe3k= +modernc.org/strutil v1.1.1 h1:xv+J1BXY3Opl2ALrBwyfEikFAj8pmqcpnfmuwUwcozs= +modernc.org/strutil v1.1.1/go.mod h1:DE+MQQ/hjKBZS2zNInV5hhcipt5rLPWkmpbGeW5mmdw= +modernc.org/tcl v1.10.0/go.mod h1:WzWapmP/7dHVhFoyPpEaNSVTL8xtewhouN/cqSJ5A2s= +modernc.org/tcl v1.13.1 h1:npxzTwFTZYM8ghWicVIX1cRWzj7Nd8i6AqqX2p+IYao= +modernc.org/tcl v1.13.1/go.mod h1:XOLfOwzhkljL4itZkK6T72ckMgvj0BDsnKNdZVUOecw= +modernc.org/token v1.0.0 h1:a0jaWiNMDhDUtqOj09wvjWWAqd3q7WpBulmL9H2egsk= +modernc.org/token v1.0.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= +modernc.org/z v1.2.21/go.mod h1:uXrObx4pGqXWIMliC5MiKuwAyMrltzwpteOFUP1PWCc= +modernc.org/z v1.3.0/go.mod h1:+mvgLH814oDjtATDdT3rs84JnUIpkvAF5B8AVkNlE2g= +modernc.org/z v1.5.1 h1:RTNHdsrOpeoSeOF4FbzTo8gBYByaJ5xT7NgZ9ZqRiJM= +modernc.org/z v1.5.1/go.mod h1:eWFB510QWW5Th9YGZT81s+LwvaAs3Q2yr4sP0rmLkv8= rsc.io/binaryregexp v0.2.0/go.mod 
h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/initializers/connectDB.go b/initializers/connectDB.go deleted file mode 100644 index 4c7947a..0000000 --- a/initializers/connectDB.go +++ /dev/null @@ -1,22 +0,0 @@ -package initializers - -import ( - "fmt" - "log" - - "gorm.io/driver/postgres" - "gorm.io/gorm" -) - -var DB *gorm.DB - -func ConnectDB(config *Config) { - var err error - dsn := fmt.Sprintf("host=%s user=%s password=%s dbname=%s port=%s sslmode=disable TimeZone=Asia/Shanghai", config.DBHost, config.DBUserName, config.DBUserPassword, config.DBName, config.DBPort) - - DB, err = gorm.Open(postgres.Open(dsn), &gorm.Config{}) - if err != nil { - log.Fatal("Failed to connect to the Database") - } - fmt.Println("? Connected Successfully to the Database") -} diff --git a/initializers/loadEnv.go b/initializers/loadEnv.go deleted file mode 100644 index c25fb54..0000000 --- a/initializers/loadEnv.go +++ /dev/null @@ -1,32 +0,0 @@ -package initializers - -import ( - "github.com/spf13/viper" -) - -type Config struct { - DBHost string `mapstructure:"POSTGRES_HOST"` - DBUserName string `mapstructure:"POSTGRES_USER"` - DBUserPassword string `mapstructure:"POSTGRES_PASSWORD"` - DBName string `mapstructure:"POSTGRES_DB"` - DBPort string `mapstructure:"POSTGRES_PORT"` - ServerPort string `mapstructure:"PORT"` - - ClientOrigin string `mapstructure:"CLIENT_ORIGIN"` -} - -func LoadConfig(path string) (config Config, err error) { - viper.AddConfigPath(path) - viper.SetConfigType("env") - viper.SetConfigName("app") - - viper.AutomaticEnv() - - err = viper.ReadInConfig() - if err != nil { - return - } - - err = viper.Unmarshal(&config) - return -} diff --git a/main.go b/main.go index aa018cf..1e04c5d 100644 --- a/main.go +++ b/main.go @@ 
-1,39 +1,5 @@ package main -import ( - "log" - "net/http" - - "github.com/bebop/ark/initializers" - "github.com/gin-gonic/gin" -) - -var ( - server *gin.Engine -) - -func init() { - config, err := initializers.LoadConfig(".devcontainer") - if err != nil { - log.Fatal("? Could not load environment variables", err) - } - - initializers.ConnectDB(&config) - - server = gin.Default() -} - func main() { - config, err := initializers.LoadConfig(".devcontainer") - if err != nil { - log.Fatal("? Could not load environment variables", err) - } - - router := server.Group("/api") - router.GET("/healthchecker", func(ctx *gin.Context) { - message := "Welcome to Golang with Gorm and Postgres" - ctx.JSON(http.StatusOK, gin.H{"status": "success", "message": message}) - }) - - log.Fatal(server.Run(":" + config.ServerPort)) + // cmd.Execute() } diff --git a/migrate/migrate.go b/migrate/migrate.go deleted file mode 100644 index edd3adc..0000000 --- a/migrate/migrate.go +++ /dev/null @@ -1,23 +0,0 @@ -package main - -import ( - "fmt" - "log" - - "github.com/bebop/ark/initializers" - "github.com/bebop/ark/models" -) - -func init() { - config, err := initializers.LoadConfig(".devcontainer") - if err != nil { - log.Fatal("? Could not load environment variables", err) - } - - initializers.ConnectDB(&config) -} - -func main() { - initializers.DB.AutoMigrate(&models.User{}) - fmt.Println("? Migration complete") -} diff --git a/models/action_type.go b/models/action_type.go new file mode 100644 index 0000000..6650556 --- /dev/null +++ b/models/action_type.go @@ -0,0 +1,1194 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// ActionType is an object representing the database table. +type ActionType struct { + ActionType string `boil:"action_type" json:"action_type" toml:"action_type" yaml:"action_type"` + Description string `boil:"description" json:"description" toml:"description" yaml:"description"` + ParentType null.String `boil:"parent_type" json:"parent_type,omitempty" toml:"parent_type" yaml:"parent_type,omitempty"` + + R *actionTypeR `boil:"-" json:"-" toml:"-" yaml:"-"` + L actionTypeL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ActionTypeColumns = struct { + ActionType string + Description string + ParentType string +}{ + ActionType: "action_type", + Description: "description", + ParentType: "parent_type", +} + +var ActionTypeTableColumns = struct { + ActionType string + Description string + ParentType string +}{ + ActionType: "action_type.action_type", + Description: "action_type.description", + ParentType: "action_type.parent_type", +} + +// Generated where + +type whereHelperstring struct{ field string } + +func (w whereHelperstring) EQ(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.EQ, x) } +func (w whereHelperstring) NEQ(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.NEQ, x) } +func (w whereHelperstring) LT(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.LT, x) } +func (w whereHelperstring) LTE(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.LTE, x) } +func (w whereHelperstring) GT(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.GT, x) } +func (w 
whereHelperstring) GTE(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.GTE, x) } +func (w whereHelperstring) IN(slice []string) qm.QueryMod { + values := make([]interface{}, 0, len(slice)) + for _, value := range slice { + values = append(values, value) + } + return qm.WhereIn(fmt.Sprintf("%s IN ?", w.field), values...) +} +func (w whereHelperstring) NIN(slice []string) qm.QueryMod { + values := make([]interface{}, 0, len(slice)) + for _, value := range slice { + values = append(values, value) + } + return qm.WhereNotIn(fmt.Sprintf("%s NOT IN ?", w.field), values...) +} + +type whereHelpernull_String struct{ field string } + +func (w whereHelpernull_String) EQ(x null.String) qm.QueryMod { + return qmhelper.WhereNullEQ(w.field, false, x) +} +func (w whereHelpernull_String) NEQ(x null.String) qm.QueryMod { + return qmhelper.WhereNullEQ(w.field, true, x) +} +func (w whereHelpernull_String) LT(x null.String) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.LT, x) +} +func (w whereHelpernull_String) LTE(x null.String) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.LTE, x) +} +func (w whereHelpernull_String) GT(x null.String) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.GT, x) +} +func (w whereHelpernull_String) GTE(x null.String) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.GTE, x) +} + +func (w whereHelpernull_String) IsNull() qm.QueryMod { return qmhelper.WhereIsNull(w.field) } +func (w whereHelpernull_String) IsNotNull() qm.QueryMod { return qmhelper.WhereIsNotNull(w.field) } + +var ActionTypeWhere = struct { + ActionType whereHelperstring + Description whereHelperstring + ParentType whereHelpernull_String +}{ + ActionType: whereHelperstring{field: "\"action_type\".\"action_type\""}, + Description: whereHelperstring{field: "\"action_type\".\"description\""}, + ParentType: whereHelpernull_String{field: "\"action_type\".\"parent_type\""}, +} + +// ActionTypeRels is where relationship names are stored. 
+var ActionTypeRels = struct { + DrugMechanisms string +}{ + DrugMechanisms: "DrugMechanisms", +} + +// actionTypeR is where relationships are stored. +type actionTypeR struct { + DrugMechanisms DrugMechanismSlice `boil:"DrugMechanisms" json:"DrugMechanisms" toml:"DrugMechanisms" yaml:"DrugMechanisms"` +} + +// NewStruct creates a new relationship struct +func (*actionTypeR) NewStruct() *actionTypeR { + return &actionTypeR{} +} + +func (r *actionTypeR) GetDrugMechanisms() DrugMechanismSlice { + if r == nil { + return nil + } + return r.DrugMechanisms +} + +// actionTypeL is where Load methods for each relationship are stored. +type actionTypeL struct{} + +var ( + actionTypeAllColumns = []string{"action_type", "description", "parent_type"} + actionTypeColumnsWithoutDefault = []string{"action_type", "description"} + actionTypeColumnsWithDefault = []string{"parent_type"} + actionTypePrimaryKeyColumns = []string{"action_type"} + actionTypeGeneratedColumns = []string{} +) + +type ( + // ActionTypeSlice is an alias for a slice of pointers to ActionType. + // This should almost always be used instead of []ActionType. 
+ ActionTypeSlice []*ActionType + // ActionTypeHook is the signature for custom ActionType hook methods + ActionTypeHook func(context.Context, boil.ContextExecutor, *ActionType) error + + actionTypeQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + actionTypeType = reflect.TypeOf(&ActionType{}) + actionTypeMapping = queries.MakeStructMapping(actionTypeType) + actionTypePrimaryKeyMapping, _ = queries.BindMapping(actionTypeType, actionTypeMapping, actionTypePrimaryKeyColumns) + actionTypeInsertCacheMut sync.RWMutex + actionTypeInsertCache = make(map[string]insertCache) + actionTypeUpdateCacheMut sync.RWMutex + actionTypeUpdateCache = make(map[string]updateCache) + actionTypeUpsertCacheMut sync.RWMutex + actionTypeUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var actionTypeAfterSelectHooks []ActionTypeHook + +var actionTypeBeforeInsertHooks []ActionTypeHook +var actionTypeAfterInsertHooks []ActionTypeHook + +var actionTypeBeforeUpdateHooks []ActionTypeHook +var actionTypeAfterUpdateHooks []ActionTypeHook + +var actionTypeBeforeDeleteHooks []ActionTypeHook +var actionTypeAfterDeleteHooks []ActionTypeHook + +var actionTypeBeforeUpsertHooks []ActionTypeHook +var actionTypeAfterUpsertHooks []ActionTypeHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *ActionType) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range actionTypeAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. 
+func (o *ActionType) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range actionTypeBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *ActionType) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range actionTypeAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *ActionType) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range actionTypeBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ActionType) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range actionTypeAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *ActionType) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range actionTypeBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. 
+func (o *ActionType) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range actionTypeAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ActionType) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range actionTypeBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *ActionType) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range actionTypeAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddActionTypeHook registers your hook function for all future operations. 
+func AddActionTypeHook(hookPoint boil.HookPoint, actionTypeHook ActionTypeHook) { + switch hookPoint { + case boil.AfterSelectHook: + actionTypeAfterSelectHooks = append(actionTypeAfterSelectHooks, actionTypeHook) + case boil.BeforeInsertHook: + actionTypeBeforeInsertHooks = append(actionTypeBeforeInsertHooks, actionTypeHook) + case boil.AfterInsertHook: + actionTypeAfterInsertHooks = append(actionTypeAfterInsertHooks, actionTypeHook) + case boil.BeforeUpdateHook: + actionTypeBeforeUpdateHooks = append(actionTypeBeforeUpdateHooks, actionTypeHook) + case boil.AfterUpdateHook: + actionTypeAfterUpdateHooks = append(actionTypeAfterUpdateHooks, actionTypeHook) + case boil.BeforeDeleteHook: + actionTypeBeforeDeleteHooks = append(actionTypeBeforeDeleteHooks, actionTypeHook) + case boil.AfterDeleteHook: + actionTypeAfterDeleteHooks = append(actionTypeAfterDeleteHooks, actionTypeHook) + case boil.BeforeUpsertHook: + actionTypeBeforeUpsertHooks = append(actionTypeBeforeUpsertHooks, actionTypeHook) + case boil.AfterUpsertHook: + actionTypeAfterUpsertHooks = append(actionTypeAfterUpsertHooks, actionTypeHook) + } +} + +// One returns a single actionType record from the query. +func (q actionTypeQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ActionType, error) { + o := &ActionType{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for action_type") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ActionType records from the query. 
+func (q actionTypeQuery) All(ctx context.Context, exec boil.ContextExecutor) (ActionTypeSlice, error) { + var o []*ActionType + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ActionType slice") + } + + if len(actionTypeAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ActionType records in the query. +func (q actionTypeQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count action_type rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q actionTypeQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if action_type exists") + } + + return count > 0, nil +} + +// DrugMechanisms retrieves all the drug_mechanism's DrugMechanisms with an executor. +func (o *ActionType) DrugMechanisms(mods ...qm.QueryMod) drugMechanismQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"drug_mechanism\".\"action_type\"=?", o.ActionType), + ) + + return DrugMechanisms(queryMods...) +} + +// LoadDrugMechanisms allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (actionTypeL) LoadDrugMechanisms(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActionType interface{}, mods queries.Applicator) error { + var slice []*ActionType + var object *ActionType + + if singular { + object = maybeActionType.(*ActionType) + } else { + slice = *maybeActionType.(*[]*ActionType) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &actionTypeR{} + } + args = append(args, object.ActionType) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &actionTypeR{} + } + + for _, a := range args { + if queries.Equal(a, obj.ActionType) { + continue Outer + } + } + + args = append(args, obj.ActionType) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`drug_mechanism`), + qm.WhereIn(`drug_mechanism.action_type in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load drug_mechanism") + } + + var resultSlice []*DrugMechanism + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice drug_mechanism") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on drug_mechanism") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for drug_mechanism") + } + + if len(drugMechanismAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.DrugMechanisms = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &drugMechanismR{} + } + foreign.R.DrugMechanismActionType = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if 
queries.Equal(local.ActionType, foreign.ActionType) { + local.R.DrugMechanisms = append(local.R.DrugMechanisms, foreign) + if foreign.R == nil { + foreign.R = &drugMechanismR{} + } + foreign.R.DrugMechanismActionType = local + break + } + } + } + + return nil +} + +// AddDrugMechanisms adds the given related objects to the existing relationships +// of the action_type, optionally inserting them as new records. +// Appends related to o.R.DrugMechanisms. +// Sets related.R.DrugMechanismActionType appropriately. +func (o *ActionType) AddDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugMechanism) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.ActionType, o.ActionType) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"drug_mechanism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"action_type"}), + strmangle.WhereClause("\"", "\"", 0, drugMechanismPrimaryKeyColumns), + ) + values := []interface{}{o.ActionType, rel.MecID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.ActionType, o.ActionType) + } + } + + if o.R == nil { + o.R = &actionTypeR{ + DrugMechanisms: related, + } + } else { + o.R.DrugMechanisms = append(o.R.DrugMechanisms, related...) 
+ } + + for _, rel := range related { + if rel.R == nil { + rel.R = &drugMechanismR{ + DrugMechanismActionType: o, + } + } else { + rel.R.DrugMechanismActionType = o + } + } + return nil +} + +// SetDrugMechanisms removes all previously related items of the +// action_type replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.DrugMechanismActionType's DrugMechanisms accordingly. +// Replaces o.R.DrugMechanisms with related. +// Sets related.R.DrugMechanismActionType's DrugMechanisms accordingly. +func (o *ActionType) SetDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugMechanism) error { + query := "update \"drug_mechanism\" set \"action_type\" = null where \"action_type\" = ?" + values := []interface{}{o.ActionType} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.DrugMechanisms { + queries.SetScanner(&rel.ActionType, nil) + if rel.R == nil { + continue + } + + rel.R.DrugMechanismActionType = nil + } + o.R.DrugMechanisms = nil + } + + return o.AddDrugMechanisms(ctx, exec, insert, related...) +} + +// RemoveDrugMechanisms relationships from objects passed in. +// Removes related items from R.DrugMechanisms (uses pointer comparison, removal does not keep order) +// Sets related.R.DrugMechanismActionType. 
+func (o *ActionType) RemoveDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, related ...*DrugMechanism) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.ActionType, nil) + if rel.R != nil { + rel.R.DrugMechanismActionType = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("action_type")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.DrugMechanisms { + if rel != ri { + continue + } + + ln := len(o.R.DrugMechanisms) + if ln > 1 && i < ln-1 { + o.R.DrugMechanisms[i] = o.R.DrugMechanisms[ln-1] + } + o.R.DrugMechanisms = o.R.DrugMechanisms[:ln-1] + break + } + } + + return nil +} + +// ActionTypes retrieves all the records using an executor. +func ActionTypes(mods ...qm.QueryMod) actionTypeQuery { + mods = append(mods, qm.From("\"action_type\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"action_type\".*"}) + } + + return actionTypeQuery{q} +} + +// FindActionType retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindActionType(ctx context.Context, exec boil.ContextExecutor, actionType string, selectCols ...string) (*ActionType, error) { + actionTypeObj := &ActionType{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"action_type\" where \"action_type\"=?", sel, + ) + + q := queries.Raw(query, actionType) + + err := q.Bind(ctx, exec, actionTypeObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from action_type") + } + + if err = actionTypeObj.doAfterSelectHooks(ctx, exec); err != nil { + return actionTypeObj, err + } + + return actionTypeObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *ActionType) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no action_type provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(actionTypeColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + actionTypeInsertCacheMut.RLock() + cache, cached := actionTypeInsertCache[key] + actionTypeInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + actionTypeAllColumns, + actionTypeColumnsWithDefault, + actionTypeColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(actionTypeType, actionTypeMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(actionTypeType, actionTypeMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"action_type\" (\"%s\") %%sVALUES (%s)%%s", 
strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"action_type\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into action_type") + } + + if !cached { + actionTypeInsertCacheMut.Lock() + actionTypeInsertCache[key] = cache + actionTypeInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ActionType. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *ActionType) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + actionTypeUpdateCacheMut.RLock() + cache, cached := actionTypeUpdateCache[key] + actionTypeUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + actionTypeAllColumns, + actionTypePrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update action_type, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"action_type\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, actionTypePrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(actionTypeType, actionTypeMapping, append(wl, actionTypePrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update action_type row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for action_type") + } + + if !cached { + actionTypeUpdateCacheMut.Lock() + actionTypeUpdateCache[key] = cache + actionTypeUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q actionTypeQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for action_type") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for action_type") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ActionTypeSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), actionTypePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"action_type\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, actionTypePrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in actionType slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all actionType") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *ActionType) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no action_type provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(actionTypeColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + actionTypeUpsertCacheMut.RLock() + cache, cached := actionTypeUpsertCache[key] + actionTypeUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + actionTypeAllColumns, + actionTypeColumnsWithDefault, + actionTypeColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + actionTypeAllColumns, + actionTypePrimaryKeyColumns, + ) + + if updateOnConflict 
&& len(update) == 0 { + return errors.New("models: unable to upsert action_type, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(actionTypePrimaryKeyColumns)) + copy(conflict, actionTypePrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"action_type\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(actionTypeType, actionTypeMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(actionTypeType, actionTypeMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert action_type") + } + + if !cached { + actionTypeUpsertCacheMut.Lock() + actionTypeUpsertCache[key] = cache + actionTypeUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ActionType record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *ActionType) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ActionType provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), actionTypePrimaryKeyMapping) + sql := "DELETE FROM \"action_type\" WHERE \"action_type\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from action_type") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for action_type") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q actionTypeQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no actionTypeQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from action_type") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for action_type") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o ActionTypeSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(actionTypeBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), actionTypePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"action_type\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, actionTypePrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from actionType slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for action_type") + } + + if len(actionTypeAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ActionType) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindActionType(ctx, exec, o.ActionType) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
// ReloadAll refetches every row in *o by primary key in one SELECT and
// replaces the slice contents with the refreshed objects.
func (o *ActionTypeSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := ActionTypeSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), actionTypePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"action_type\".* FROM \"action_type\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, actionTypePrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in ActionTypeSlice")
	}

	// NOTE: rows deleted concurrently are silently dropped from the slice.
	*o = slice

	return nil
}

// ActionTypeExists checks if the ActionType row exists.
func ActionTypeExists(ctx context.Context, exec boil.ContextExecutor, actionType string) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"action_type\" where \"action_type\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, actionType)
	}
	row := exec.QueryRowContext(ctx, sql, actionType)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if action_type exists")
	}

	return exists, nil
}
diff --git a/models/activities.go b/models/activities.go
new file mode 100644
index 0000000..ccc93a8
--- /dev/null
+++ b/models/activities.go
@@ -0,0 +1,3324 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/sqlboiler/v4/types"
	"github.com/volatiletech/strmangle"
)

// Activity is an object representing the database table.
type Activity struct {
	ActivityID          int64             `boil:"activity_id" json:"activity_id" toml:"activity_id" yaml:"activity_id"`
	AssayID             int64             `boil:"assay_id" json:"assay_id" toml:"assay_id" yaml:"assay_id"`
	DocID               null.Int64        `boil:"doc_id" json:"doc_id,omitempty" toml:"doc_id" yaml:"doc_id,omitempty"`
	RecordID            int64             `boil:"record_id" json:"record_id" toml:"record_id" yaml:"record_id"`
	Molregno            null.Int64        `boil:"molregno" json:"molregno,omitempty" toml:"molregno" yaml:"molregno,omitempty"`
	StandardRelation    null.String       `boil:"standard_relation" json:"standard_relation,omitempty" toml:"standard_relation" yaml:"standard_relation,omitempty"`
	StandardValue       types.NullDecimal `boil:"standard_value" json:"standard_value,omitempty" toml:"standard_value" yaml:"standard_value,omitempty"`
	StandardUnits       null.String       `boil:"standard_units" json:"standard_units,omitempty" toml:"standard_units" yaml:"standard_units,omitempty"`
	StandardFlag        null.Int16        `boil:"standard_flag" json:"standard_flag,omitempty" toml:"standard_flag" yaml:"standard_flag,omitempty"`
	StandardType        null.String       `boil:"standard_type" json:"standard_type,omitempty" toml:"standard_type" yaml:"standard_type,omitempty"`
	ActivityComment     null.String       `boil:"activity_comment" json:"activity_comment,omitempty" toml:"activity_comment" yaml:"activity_comment,omitempty"`
	DataValidityComment null.String       `boil:"data_validity_comment" json:"data_validity_comment,omitempty" toml:"data_validity_comment" yaml:"data_validity_comment,omitempty"`
	PotentialDuplicate  null.Int16        `boil:"potential_duplicate" json:"potential_duplicate,omitempty" toml:"potential_duplicate" yaml:"potential_duplicate,omitempty"`
	PchemblValue        types.NullDecimal `boil:"pchembl_value" json:"pchembl_value,omitempty" toml:"pchembl_value" yaml:"pchembl_value,omitempty"`
	BaoEndpoint         null.String       `boil:"bao_endpoint" json:"bao_endpoint,omitempty" toml:"bao_endpoint" yaml:"bao_endpoint,omitempty"`
	UoUnits             null.String       `boil:"uo_units" json:"uo_units,omitempty" toml:"uo_units" yaml:"uo_units,omitempty"`
	QudtUnits           null.String       `boil:"qudt_units" json:"qudt_units,omitempty" toml:"qudt_units" yaml:"qudt_units,omitempty"`
	Toid                null.Int64        `boil:"toid" json:"toid,omitempty" toml:"toid" yaml:"toid,omitempty"`
	UpperValue          types.NullDecimal `boil:"upper_value" json:"upper_value,omitempty" toml:"upper_value" yaml:"upper_value,omitempty"`
	StandardUpperValue  types.NullDecimal `boil:"standard_upper_value" json:"standard_upper_value,omitempty" toml:"standard_upper_value" yaml:"standard_upper_value,omitempty"`
	SRCID               null.Int64        `boil:"src_id" json:"src_id,omitempty" toml:"src_id" yaml:"src_id,omitempty"`
	Type                string            `boil:"type" json:"type" toml:"type" yaml:"type"`
	Relation            null.String       `boil:"relation" json:"relation,omitempty" toml:"relation" yaml:"relation,omitempty"`
	Value               types.NullDecimal `boil:"value" json:"value,omitempty" toml:"value" yaml:"value,omitempty"`
	Units               null.String       `boil:"units" json:"units,omitempty" toml:"units" yaml:"units,omitempty"`
	TextValue           null.String       `boil:"text_value" json:"text_value,omitempty" toml:"text_value" yaml:"text_value,omitempty"`
	StandardTextValue   null.String       `boil:"standard_text_value" json:"standard_text_value,omitempty" toml:"standard_text_value" yaml:"standard_text_value,omitempty"`

	// R holds eager-loaded relationships; L carries the Load* methods.
	R *activityR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L activityL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// ActivityColumns maps struct field names to their bare column names.
var ActivityColumns = struct {
	ActivityID          string
	AssayID             string
	DocID               string
	RecordID            string
	Molregno            string
	StandardRelation    string
	StandardValue       string
	StandardUnits       string
	StandardFlag        string
	StandardType        string
	ActivityComment     string
	DataValidityComment string
	PotentialDuplicate  string
	PchemblValue        string
	BaoEndpoint         string
	UoUnits             string
	QudtUnits           string
	Toid                string
	UpperValue          string
	StandardUpperValue  string
	SRCID               string
	Type                string
	Relation            string
	Value               string
	Units               string
	TextValue           string
	StandardTextValue   string
}{
	ActivityID:          "activity_id",
	AssayID:             "assay_id",
	DocID:               "doc_id",
	RecordID:            "record_id",
	Molregno:            "molregno",
	StandardRelation:    "standard_relation",
	StandardValue:       "standard_value",
	StandardUnits:       "standard_units",
	StandardFlag:        "standard_flag",
	StandardType:        "standard_type",
	ActivityComment:     "activity_comment",
	DataValidityComment: "data_validity_comment",
	PotentialDuplicate:  "potential_duplicate",
	PchemblValue:        "pchembl_value",
	BaoEndpoint:         "bao_endpoint",
	UoUnits:             "uo_units",
	QudtUnits:           "qudt_units",
	Toid:                "toid",
	UpperValue:          "upper_value",
	StandardUpperValue:  "standard_upper_value",
	SRCID:               "src_id",
	Type:                "type",
	Relation:            "relation",
	Value:               "value",
	Units:               "units",
	TextValue:           "text_value",
	StandardTextValue:   "standard_text_value",
}

// ActivityTableColumns maps struct field names to table-qualified column names.
var ActivityTableColumns = struct {
	ActivityID          string
	AssayID             string
	DocID               string
	RecordID            string
	Molregno            string
	StandardRelation    string
	StandardValue       string
	StandardUnits       string
	StandardFlag        string
	StandardType        string
	ActivityComment     string
	DataValidityComment string
	PotentialDuplicate  string
	PchemblValue        string
	BaoEndpoint         string
	UoUnits             string
	QudtUnits           string
	Toid                string
	UpperValue          string
	StandardUpperValue  string
	SRCID               string
	Type                string
	Relation            string
	Value               string
	Units               string
	TextValue           string
	StandardTextValue   string
}{
	ActivityID:          "activities.activity_id",
	AssayID:             "activities.assay_id",
	DocID:               "activities.doc_id",
	RecordID:            "activities.record_id",
	Molregno:            "activities.molregno",
	StandardRelation:    "activities.standard_relation",
	StandardValue:       "activities.standard_value",
	StandardUnits:       "activities.standard_units",
	StandardFlag:        "activities.standard_flag",
	StandardType:        "activities.standard_type",
	ActivityComment:     "activities.activity_comment",
	DataValidityComment: "activities.data_validity_comment",
	PotentialDuplicate:  "activities.potential_duplicate",
	PchemblValue:        "activities.pchembl_value",
	BaoEndpoint:         "activities.bao_endpoint",
	UoUnits:             "activities.uo_units",
	QudtUnits:           "activities.qudt_units",
	Toid:                "activities.toid",
	UpperValue:          "activities.upper_value",
	StandardUpperValue:  "activities.standard_upper_value",
	SRCID:               "activities.src_id",
	Type:                "activities.type",
	Relation:            "activities.relation",
	Value:               "activities.value",
	Units:               "activities.units",
	TextValue:           "activities.text_value",
	StandardTextValue:   "activities.standard_text_value",
}

// Generated where

type whereHelperint64 struct{ field string }

func (w whereHelperint64) EQ(x int64) qm.QueryMod  { return qmhelper.Where(w.field, qmhelper.EQ, x) }
func (w whereHelperint64) NEQ(x int64) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.NEQ, x) }
func (w whereHelperint64) LT(x int64) qm.QueryMod  { return qmhelper.Where(w.field, qmhelper.LT, x) }
func (w whereHelperint64) LTE(x int64) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.LTE, x) }
func (w whereHelperint64) GT(x int64) qm.QueryMod  { return qmhelper.Where(w.field, qmhelper.GT, x) }
func (w whereHelperint64) GTE(x int64) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.GTE, x) }
func (w whereHelperint64) IN(slice []int64) qm.QueryMod {
	values := make([]interface{}, 0, len(slice))
	for _, value := range slice {
		values = append(values, value)
	}
	return qm.WhereIn(fmt.Sprintf("%s IN ?", w.field), values...)
}
func (w whereHelperint64) NIN(slice []int64) qm.QueryMod {
	values := make([]interface{}, 0, len(slice))
	for _, value := range slice {
		values = append(values, value)
	}
	return qm.WhereNotIn(fmt.Sprintf("%s NOT IN ?", w.field), values...)
}

type whereHelpernull_Int64 struct{ field string }

func (w whereHelpernull_Int64) EQ(x null.Int64) qm.QueryMod {
	return qmhelper.WhereNullEQ(w.field, false, x)
}
func (w whereHelpernull_Int64) NEQ(x null.Int64) qm.QueryMod {
	return qmhelper.WhereNullEQ(w.field, true, x)
}
func (w whereHelpernull_Int64) LT(x null.Int64) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.LT, x)
}
func (w whereHelpernull_Int64) LTE(x null.Int64) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.LTE, x)
}
func (w whereHelpernull_Int64) GT(x null.Int64) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.GT, x)
}
func (w whereHelpernull_Int64) GTE(x null.Int64) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.GTE, x)
}

func (w whereHelpernull_Int64) IsNull() qm.QueryMod    { return qmhelper.WhereIsNull(w.field) }
func (w whereHelpernull_Int64) IsNotNull() qm.QueryMod { return qmhelper.WhereIsNotNull(w.field) }

type whereHelpertypes_NullDecimal struct{ field string }

func (w whereHelpertypes_NullDecimal) EQ(x types.NullDecimal) qm.QueryMod {
	return qmhelper.WhereNullEQ(w.field, false, x)
}
func (w whereHelpertypes_NullDecimal) NEQ(x types.NullDecimal) qm.QueryMod {
	return qmhelper.WhereNullEQ(w.field, true, x)
}
func (w whereHelpertypes_NullDecimal) LT(x types.NullDecimal) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.LT, x)
}
func (w whereHelpertypes_NullDecimal) LTE(x types.NullDecimal) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.LTE, x)
}
func (w whereHelpertypes_NullDecimal) GT(x types.NullDecimal) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.GT, x)
}
func (w whereHelpertypes_NullDecimal) GTE(x types.NullDecimal) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.GTE, x)
}

func (w whereHelpertypes_NullDecimal) IsNull() qm.QueryMod { return qmhelper.WhereIsNull(w.field) }
func (w whereHelpertypes_NullDecimal) IsNotNull() qm.QueryMod {
	return qmhelper.WhereIsNotNull(w.field)
}

type whereHelpernull_Int16 struct{ field string }

func (w whereHelpernull_Int16) EQ(x null.Int16) qm.QueryMod {
	return qmhelper.WhereNullEQ(w.field, false, x)
}
func (w whereHelpernull_Int16) NEQ(x null.Int16) qm.QueryMod {
	return qmhelper.WhereNullEQ(w.field, true, x)
}
func (w whereHelpernull_Int16) LT(x null.Int16) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.LT, x)
}
func (w whereHelpernull_Int16) LTE(x null.Int16) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.LTE, x)
}
func (w whereHelpernull_Int16) GT(x null.Int16) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.GT, x)
}
func (w whereHelpernull_Int16) GTE(x null.Int16) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.GTE, x)
}

func (w whereHelpernull_Int16) IsNull() qm.QueryMod    { return qmhelper.WhereIsNull(w.field) }
func (w whereHelpernull_Int16) IsNotNull() qm.QueryMod { return qmhelper.WhereIsNotNull(w.field) }

// ActivityWhere provides typed where-clause builders for each column.
// (whereHelperstring / whereHelpernull_String are declared in a sibling generated file.)
var ActivityWhere = struct {
	ActivityID          whereHelperint64
	AssayID             whereHelperint64
	DocID               whereHelpernull_Int64
	RecordID            whereHelperint64
	Molregno            whereHelpernull_Int64
	StandardRelation    whereHelpernull_String
	StandardValue       whereHelpertypes_NullDecimal
	StandardUnits       whereHelpernull_String
	StandardFlag        whereHelpernull_Int16
	StandardType        whereHelpernull_String
	ActivityComment     whereHelpernull_String
	DataValidityComment whereHelpernull_String
	PotentialDuplicate  whereHelpernull_Int16
	PchemblValue        whereHelpertypes_NullDecimal
	BaoEndpoint         whereHelpernull_String
	UoUnits             whereHelpernull_String
	QudtUnits           whereHelpernull_String
	Toid                whereHelpernull_Int64
	UpperValue          whereHelpertypes_NullDecimal
	StandardUpperValue  whereHelpertypes_NullDecimal
	SRCID               whereHelpernull_Int64
	Type                whereHelperstring
	Relation            whereHelpernull_String
	Value               whereHelpertypes_NullDecimal
	Units               whereHelpernull_String
	TextValue           whereHelpernull_String
	StandardTextValue   whereHelpernull_String
}{
	ActivityID:          whereHelperint64{field: "\"activities\".\"activity_id\""},
	AssayID:             whereHelperint64{field: "\"activities\".\"assay_id\""},
	DocID:               whereHelpernull_Int64{field: "\"activities\".\"doc_id\""},
	RecordID:            whereHelperint64{field: "\"activities\".\"record_id\""},
	Molregno:            whereHelpernull_Int64{field: "\"activities\".\"molregno\""},
	StandardRelation:    whereHelpernull_String{field: "\"activities\".\"standard_relation\""},
	StandardValue:       whereHelpertypes_NullDecimal{field: "\"activities\".\"standard_value\""},
	StandardUnits:       whereHelpernull_String{field: "\"activities\".\"standard_units\""},
	StandardFlag:        whereHelpernull_Int16{field: "\"activities\".\"standard_flag\""},
	StandardType:        whereHelpernull_String{field: "\"activities\".\"standard_type\""},
	ActivityComment:     whereHelpernull_String{field: "\"activities\".\"activity_comment\""},
	DataValidityComment: whereHelpernull_String{field: "\"activities\".\"data_validity_comment\""},
	PotentialDuplicate:  whereHelpernull_Int16{field: "\"activities\".\"potential_duplicate\""},
	PchemblValue:        whereHelpertypes_NullDecimal{field: "\"activities\".\"pchembl_value\""},
	BaoEndpoint:         whereHelpernull_String{field: "\"activities\".\"bao_endpoint\""},
	UoUnits:             whereHelpernull_String{field: "\"activities\".\"uo_units\""},
	QudtUnits:           whereHelpernull_String{field: "\"activities\".\"qudt_units\""},
	Toid:                whereHelpernull_Int64{field: "\"activities\".\"toid\""},
	UpperValue:          whereHelpertypes_NullDecimal{field: "\"activities\".\"upper_value\""},
	StandardUpperValue:  whereHelpertypes_NullDecimal{field: "\"activities\".\"standard_upper_value\""},
	SRCID:               whereHelpernull_Int64{field: "\"activities\".\"src_id\""},
	Type:                whereHelperstring{field: "\"activities\".\"type\""},
	Relation:            whereHelpernull_String{field: "\"activities\".\"relation\""},
	Value:               whereHelpertypes_NullDecimal{field: "\"activities\".\"value\""},
	Units:               whereHelpernull_String{field: "\"activities\".\"units\""},
	TextValue:           whereHelpernull_String{field: "\"activities\".\"text_value\""},
	StandardTextValue:   whereHelpernull_String{field: "\"activities\".\"standard_text_value\""},
}

// ActivityRels is where relationship names are stored.
var ActivityRels = struct {
	DataValidityCommentDataValidityLookup string
	SRC                                   string
	Record                                string
	MolregnoMoleculeDictionary            string
	Doc                                   string
	BaoEndpointBioassayOntology           string
	Assay                                 string
	LigandEff                             string
	ActivityProperties                    string
	ActivitySuppMaps                      string
	PredictedBindingDomains               string
}{
	DataValidityCommentDataValidityLookup: "DataValidityCommentDataValidityLookup",
	SRC:                                   "SRC",
	Record:                                "Record",
	MolregnoMoleculeDictionary:            "MolregnoMoleculeDictionary",
	Doc:                                   "Doc",
	BaoEndpointBioassayOntology:           "BaoEndpointBioassayOntology",
	Assay:                                 "Assay",
	LigandEff:                             "LigandEff",
	ActivityProperties:                    "ActivityProperties",
	ActivitySuppMaps:                      "ActivitySuppMaps",
	PredictedBindingDomains:               "PredictedBindingDomains",
}

// activityR is where relationships are stored.
// activityR holds the eager-loaded relationship targets for an Activity:
// pointers for to-one relations, slices for to-many relations.
type activityR struct {
	DataValidityCommentDataValidityLookup *DataValidityLookup         `boil:"DataValidityCommentDataValidityLookup" json:"DataValidityCommentDataValidityLookup" toml:"DataValidityCommentDataValidityLookup" yaml:"DataValidityCommentDataValidityLookup"`
	SRC                                   *Source                     `boil:"SRC" json:"SRC" toml:"SRC" yaml:"SRC"`
	Record                                *CompoundRecord             `boil:"Record" json:"Record" toml:"Record" yaml:"Record"`
	MolregnoMoleculeDictionary            *MoleculeDictionary         `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"`
	Doc                                   *Doc                        `boil:"Doc" json:"Doc" toml:"Doc" yaml:"Doc"`
	BaoEndpointBioassayOntology           *BioassayOntology           `boil:"BaoEndpointBioassayOntology" json:"BaoEndpointBioassayOntology" toml:"BaoEndpointBioassayOntology" yaml:"BaoEndpointBioassayOntology"`
	Assay                                 *Assay                      `boil:"Assay" json:"Assay" toml:"Assay" yaml:"Assay"`
	LigandEff                             *LigandEff                  `boil:"LigandEff" json:"LigandEff" toml:"LigandEff" yaml:"LigandEff"`
	ActivityProperties                    ActivityPropertySlice       `boil:"ActivityProperties" json:"ActivityProperties" toml:"ActivityProperties" yaml:"ActivityProperties"`
	ActivitySuppMaps                      ActivitySuppMapSlice        `boil:"ActivitySuppMaps" json:"ActivitySuppMaps" toml:"ActivitySuppMaps" yaml:"ActivitySuppMaps"`
	PredictedBindingDomains               PredictedBindingDomainSlice `boil:"PredictedBindingDomains" json:"PredictedBindingDomains" toml:"PredictedBindingDomains" yaml:"PredictedBindingDomains"`
}

// NewStruct creates a new relationship struct
func (*activityR) NewStruct() *activityR {
	return &activityR{}
}

// The getters below are nil-receiver safe: they return the zero value when
// no relationships have been loaded (r == nil).

func (r *activityR) GetDataValidityCommentDataValidityLookup() *DataValidityLookup {
	if r == nil {
		return nil
	}
	return r.DataValidityCommentDataValidityLookup
}

func (r *activityR) GetSRC() *Source {
	if r == nil {
		return nil
	}
	return r.SRC
}

func (r *activityR) GetRecord() *CompoundRecord {
	if r == nil {
		return nil
	}
	return r.Record
}

func (r *activityR) GetMolregnoMoleculeDictionary() *MoleculeDictionary {
	if r == nil {
		return nil
	}
	return r.MolregnoMoleculeDictionary
}

func (r *activityR) GetDoc() *Doc {
	if r == nil {
		return nil
	}
	return r.Doc
}

func (r *activityR) GetBaoEndpointBioassayOntology() *BioassayOntology {
	if r == nil {
		return nil
	}
	return r.BaoEndpointBioassayOntology
}

func (r *activityR) GetAssay() *Assay {
	if r == nil {
		return nil
	}
	return r.Assay
}

func (r *activityR) GetLigandEff() *LigandEff {
	if r == nil {
		return nil
	}
	return r.LigandEff
}

func (r *activityR) GetActivityProperties() ActivityPropertySlice {
	if r == nil {
		return nil
	}
	return r.ActivityProperties
}

func (r *activityR) GetActivitySuppMaps() ActivitySuppMapSlice {
	if r == nil {
		return nil
	}
	return r.ActivitySuppMaps
}

func (r *activityR) GetPredictedBindingDomains() PredictedBindingDomainSlice {
	if r == nil {
		return nil
	}
	return r.PredictedBindingDomains
}

// activityL is where Load methods for each relationship are stored.
// activityL is a method-holder type; its Load* methods perform eager loading.
type activityL struct{}

var (
	activityAllColumns            = []string{"activity_id", "assay_id", "doc_id", "record_id", "molregno", "standard_relation", "standard_value", "standard_units", "standard_flag", "standard_type", "activity_comment", "data_validity_comment", "potential_duplicate", "pchembl_value", "bao_endpoint", "uo_units", "qudt_units", "toid", "upper_value", "standard_upper_value", "src_id", "type", "relation", "value", "units", "text_value", "standard_text_value"}
	activityColumnsWithoutDefault = []string{"activity_id", "assay_id", "record_id", "type"}
	activityColumnsWithDefault    = []string{"doc_id", "molregno", "standard_relation", "standard_value", "standard_units", "standard_flag", "standard_type", "activity_comment", "data_validity_comment", "potential_duplicate", "pchembl_value", "bao_endpoint", "uo_units", "qudt_units", "toid", "upper_value", "standard_upper_value", "src_id", "relation", "value", "units", "text_value", "standard_text_value"}
	activityPrimaryKeyColumns     = []string{"activity_id"}
	activityGeneratedColumns      = []string{}
)

type (
	// ActivitySlice is an alias for a slice of pointers to Activity.
	// This should almost always be used instead of []Activity.
	ActivitySlice []*Activity
	// ActivityHook is the signature for custom Activity hook methods
	ActivityHook func(context.Context, boil.ContextExecutor, *Activity) error

	activityQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	activityType                 = reflect.TypeOf(&Activity{})
	activityMapping              = queries.MakeStructMapping(activityType)
	activityPrimaryKeyMapping, _ = queries.BindMapping(activityType, activityMapping, activityPrimaryKeyColumns)
	activityInsertCacheMut       sync.RWMutex
	activityInsertCache          = make(map[string]insertCache)
	activityUpdateCacheMut       sync.RWMutex
	activityUpdateCache          = make(map[string]updateCache)
	activityUpsertCacheMut       sync.RWMutex
	activityUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Package-level hook registries, appended to via AddActivityHook.
var activityAfterSelectHooks []ActivityHook

var activityBeforeInsertHooks []ActivityHook
var activityAfterInsertHooks []ActivityHook

var activityBeforeUpdateHooks []ActivityHook
var activityAfterUpdateHooks []ActivityHook

var activityBeforeDeleteHooks []ActivityHook
var activityAfterDeleteHooks []ActivityHook

var activityBeforeUpsertHooks []ActivityHook
var activityAfterUpsertHooks []ActivityHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *Activity) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range activityAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
// Each do*Hooks method below runs the corresponding registered hook slice in
// order, short-circuiting on the first error; all are skipped when
// boil.HooksAreSkipped(ctx) is set.
func (o *Activity) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range activityBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *Activity) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range activityAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *Activity) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range activityBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *Activity) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range activityAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *Activity) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range activityBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *Activity) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range activityAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *Activity) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range activityBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *Activity) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range activityAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddActivityHook registers your hook function for all future operations.
// AddActivityHook appends activityHook to the package-level registry for the
// given hook point. NOTE: registration is append-only and not mutex-guarded,
// so hooks should be added during startup, before concurrent use.
func AddActivityHook(hookPoint boil.HookPoint, activityHook ActivityHook) {
	switch hookPoint {
	case boil.AfterSelectHook:
		activityAfterSelectHooks = append(activityAfterSelectHooks, activityHook)
	case boil.BeforeInsertHook:
		activityBeforeInsertHooks = append(activityBeforeInsertHooks, activityHook)
	case boil.AfterInsertHook:
		activityAfterInsertHooks = append(activityAfterInsertHooks, activityHook)
	case boil.BeforeUpdateHook:
		activityBeforeUpdateHooks = append(activityBeforeUpdateHooks, activityHook)
	case boil.AfterUpdateHook:
		activityAfterUpdateHooks = append(activityAfterUpdateHooks, activityHook)
	case boil.BeforeDeleteHook:
		activityBeforeDeleteHooks = append(activityBeforeDeleteHooks, activityHook)
	case boil.AfterDeleteHook:
		activityAfterDeleteHooks = append(activityAfterDeleteHooks, activityHook)
	case boil.BeforeUpsertHook:
		activityBeforeUpsertHooks = append(activityBeforeUpsertHooks, activityHook)
	case boil.AfterUpsertHook:
		activityAfterUpsertHooks = append(activityAfterUpsertHooks, activityHook)
	}
}

// One returns a single activity record from the query.
func (q activityQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Activity, error) {
	o := &Activity{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		// Surface sql.ErrNoRows unwrapped so callers can compare directly.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for activities")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all Activity records from the query.
// All executes the query and binds every resulting row into an ActivitySlice,
// running after-select hooks per object when any are registered.
func (q activityQuery) All(ctx context.Context, exec boil.ContextExecutor) (ActivitySlice, error) {
	var o []*Activity

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to Activity slice")
	}

	if len(activityAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all Activity records in the query.
func (q activityQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count activities rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q activityQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	// LIMIT 1 lets the database stop at the first match.
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if activities exists")
	}

	return count > 0, nil
}

// DataValidityCommentDataValidityLookup pointed to by the foreign key.
func (o *Activity) DataValidityCommentDataValidityLookup(mods ...qm.QueryMod) dataValidityLookupQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"data_validity_comment\" = ?", o.DataValidityComment),
	}

	queryMods = append(queryMods, mods...)

	return DataValidityLookups(queryMods...)
}

// SRC pointed to by the foreign key.
func (o *Activity) SRC(mods ...qm.QueryMod) sourceQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"src_id\" = ?", o.SRCID),
	}

	queryMods = append(queryMods, mods...)

	return Sources(queryMods...)
}

// Record pointed to by the foreign key.
func (o *Activity) Record(mods ...qm.QueryMod) compoundRecordQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"record_id\" = ?", o.RecordID),
	}

	queryMods = append(queryMods, mods...)

	return CompoundRecords(queryMods...)
}

// MolregnoMoleculeDictionary pointed to by the foreign key.
func (o *Activity) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"molregno\" = ?", o.Molregno),
	}

	queryMods = append(queryMods, mods...)

	return MoleculeDictionaries(queryMods...)
}

// Doc pointed to by the foreign key.
func (o *Activity) Doc(mods ...qm.QueryMod) docQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"doc_id\" = ?", o.DocID),
	}

	queryMods = append(queryMods, mods...)

	return Docs(queryMods...)
}

// BaoEndpointBioassayOntology pointed to by the foreign key.
func (o *Activity) BaoEndpointBioassayOntology(mods ...qm.QueryMod) bioassayOntologyQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"bao_id\" = ?", o.BaoEndpoint),
	}

	queryMods = append(queryMods, mods...)

	return BioassayOntologies(queryMods...)
}

// Assay pointed to by the foreign key.
func (o *Activity) Assay(mods ...qm.QueryMod) assayQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"assay_id\" = ?", o.AssayID),
	}

	queryMods = append(queryMods, mods...)

	return Assays(queryMods...)
}

// LigandEff pointed to by the foreign key.
func (o *Activity) LigandEff(mods ...qm.QueryMod) ligandEffQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"activity_id\" = ?", o.ActivityID),
	}

	queryMods = append(queryMods, mods...)

	return LigandEffs(queryMods...)
}

// ActivityProperties retrieves all the activity_property's ActivityProperties with an executor.
func (o *Activity) ActivityProperties(mods ...qm.QueryMod) activityPropertyQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"activity_properties\".\"activity_id\"=?", o.ActivityID),
	)

	return ActivityProperties(queryMods...)
}

// ActivitySuppMaps retrieves all the activity_supp_map's ActivitySuppMaps with an executor.
func (o *Activity) ActivitySuppMaps(mods ...qm.QueryMod) activitySuppMapQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"activity_supp_map\".\"activity_id\"=?", o.ActivityID),
	)

	return ActivitySuppMaps(queryMods...)
}

// PredictedBindingDomains retrieves all the predicted_binding_domain's PredictedBindingDomains with an executor.
func (o *Activity) PredictedBindingDomains(mods ...qm.QueryMod) predictedBindingDomainQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"predicted_binding_domains\".\"activity_id\"=?", o.ActivityID),
	)

	return PredictedBindingDomains(queryMods...)
}

// LoadDataValidityCommentDataValidityLookup allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (activityL) LoadDataValidityCommentDataValidityLookup(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivity interface{}, mods queries.Applicator) error {
	var slice []*Activity
	var object *Activity

	// maybeActivity is either a single *Activity or a *[]*Activity.
	if singular {
		object = maybeActivity.(*Activity)
	} else {
		slice = *maybeActivity.(*[]*Activity)
	}

	// Collect the distinct, non-null FK values to query by, initializing
	// each object's relationship struct (R) along the way.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &activityR{}
		}
		if !queries.IsNil(object.DataValidityComment) {
			args = append(args, object.DataValidityComment)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &activityR{}
			}

			// Dedupe: skip FK values already queued.
			for _, a := range args {
				if queries.Equal(a, obj.DataValidityComment) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.DataValidityComment) {
				args = append(args, obj.DataValidityComment)
			}

		}
	}

	// All FKs were null: nothing to load.
	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`data_validity_lookup`),
		qm.WhereIn(`data_validity_lookup.data_validity_comment in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load DataValidityLookup")
	}

	var resultSlice []*DataValidityLookup
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice DataValidityLookup")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for data_validity_lookup")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for data_validity_lookup")
	}

	// Run registered after-select hooks on the freshly loaded rows.
	if len(activityAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Cache the loaded row(s) on both sides of the relationship.
	if singular {
		foreign := resultSlice[0]
		object.R.DataValidityCommentDataValidityLookup = foreign
		if foreign.R == nil {
			foreign.R = &dataValidityLookupR{}
		}
		foreign.R.DataValidityCommentActivities = append(foreign.R.DataValidityCommentActivities, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.DataValidityComment, foreign.DataValidityComment) {
				local.R.DataValidityCommentDataValidityLookup = foreign
				if foreign.R == nil {
					foreign.R = &dataValidityLookupR{}
				}
				foreign.R.DataValidityCommentActivities = append(foreign.R.DataValidityCommentActivities, local)
				break
			}
		}
	}

	return nil
}

// LoadSRC allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (activityL) LoadSRC(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivity interface{}, mods queries.Applicator) error {
	var slice []*Activity
	var object *Activity

	if singular {
		object = maybeActivity.(*Activity)
	} else {
		slice = *maybeActivity.(*[]*Activity)
	}

	// Gather distinct, non-null src_id values; SRCID is nullable, hence
	// the queries.IsNil / queries.Equal handling.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &activityR{}
		}
		if !queries.IsNil(object.SRCID) {
			args = append(args, object.SRCID)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &activityR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.SRCID) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.SRCID) {
				args = append(args, obj.SRCID)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`source`),
		qm.WhereIn(`source.src_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load Source")
	}

	var resultSlice []*Source
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice Source")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for source")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for source")
	}

	if len(activityAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Link both sides of the relationship for the cached rows.
	if singular {
		foreign := resultSlice[0]
		object.R.SRC = foreign
		if foreign.R == nil {
			foreign.R = &sourceR{}
		}
		foreign.R.SRCActivities = append(foreign.R.SRCActivities, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.SRCID, foreign.SRCID) {
				local.R.SRC = foreign
				if foreign.R == nil {
					foreign.R = &sourceR{}
				}
				foreign.R.SRCActivities = append(foreign.R.SRCActivities, local)
				break
			}
		}
	}

	return nil
}

// LoadRecord allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (activityL) LoadRecord(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivity interface{}, mods queries.Applicator) error {
	var slice []*Activity
	var object *Activity

	if singular {
		object = maybeActivity.(*Activity)
	} else {
		slice = *maybeActivity.(*[]*Activity)
	}

	// RecordID is non-nullable, so values are compared with == and no
	// nil check is needed before queueing.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &activityR{}
		}
		args = append(args, object.RecordID)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &activityR{}
			}

			for _, a := range args {
				if a == obj.RecordID {
					continue Outer
				}
			}

			args = append(args, obj.RecordID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`compound_records`),
		qm.WhereIn(`compound_records.record_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load CompoundRecord")
	}

	var resultSlice []*CompoundRecord
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice CompoundRecord")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for compound_records")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound_records")
	}

	if len(activityAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Cache the loaded row(s) on both sides of the relationship.
	if singular {
		foreign := resultSlice[0]
		object.R.Record = foreign
		if foreign.R == nil {
			foreign.R = &compoundRecordR{}
		}
		foreign.R.RecordActivities = append(foreign.R.RecordActivities, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.RecordID == foreign.RecordID {
				local.R.Record = foreign
				if foreign.R == nil {
					foreign.R = &compoundRecordR{}
				}
				foreign.R.RecordActivities = append(foreign.R.RecordActivities, local)
				break
			}
		}
	}

	return nil
}

// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (activityL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivity interface{}, mods queries.Applicator) error {
	var slice []*Activity
	var object *Activity

	if singular {
		object = maybeActivity.(*Activity)
	} else {
		slice = *maybeActivity.(*[]*Activity)
	}

	// Molregno is nullable: gather distinct non-null values only.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &activityR{}
		}
		if !queries.IsNil(object.Molregno) {
			args = append(args, object.Molregno)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &activityR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.Molregno) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.Molregno) {
				args = append(args, obj.Molregno)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_dictionary`),
		qm.WhereIn(`molecule_dictionary.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load MoleculeDictionary")
	}

	var resultSlice []*MoleculeDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary")
	}

	if len(activityAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.MolregnoMoleculeDictionary = foreign
		if foreign.R == nil {
			foreign.R = &moleculeDictionaryR{}
		}
		foreign.R.MolregnoActivities = append(foreign.R.MolregnoActivities, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.Molregno, foreign.Molregno) {
				local.R.MolregnoMoleculeDictionary = foreign
				if foreign.R == nil {
					foreign.R = &moleculeDictionaryR{}
				}
				foreign.R.MolregnoActivities = append(foreign.R.MolregnoActivities, local)
				break
			}
		}
	}

	return nil
}

// LoadDoc allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (activityL) LoadDoc(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivity interface{}, mods queries.Applicator) error {
	var slice []*Activity
	var object *Activity

	if singular {
		object = maybeActivity.(*Activity)
	} else {
		slice = *maybeActivity.(*[]*Activity)
	}

	// DocID is nullable: gather distinct non-null values only.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &activityR{}
		}
		if !queries.IsNil(object.DocID) {
			args = append(args, object.DocID)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &activityR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.DocID) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.DocID) {
				args = append(args, obj.DocID)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`docs`),
		qm.WhereIn(`docs.doc_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load Doc")
	}

	var resultSlice []*Doc
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice Doc")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for docs")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for docs")
	}

	if len(activityAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.Doc = foreign
		if foreign.R == nil {
			foreign.R = &docR{}
		}
		foreign.R.Activities = append(foreign.R.Activities, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.DocID, foreign.DocID) {
				local.R.Doc = foreign
				if foreign.R == nil {
					foreign.R = &docR{}
				}
				foreign.R.Activities = append(foreign.R.Activities, local)
				break
			}
		}
	}

	return nil
}

// LoadBaoEndpointBioassayOntology allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (activityL) LoadBaoEndpointBioassayOntology(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivity interface{}, mods queries.Applicator) error {
	var slice []*Activity
	var object *Activity

	if singular {
		object = maybeActivity.(*Activity)
	} else {
		slice = *maybeActivity.(*[]*Activity)
	}

	// BaoEndpoint is nullable: gather distinct non-null values only.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &activityR{}
		}
		if !queries.IsNil(object.BaoEndpoint) {
			args = append(args, object.BaoEndpoint)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &activityR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.BaoEndpoint) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.BaoEndpoint) {
				args = append(args, obj.BaoEndpoint)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`bioassay_ontology`),
		qm.WhereIn(`bioassay_ontology.bao_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load BioassayOntology")
	}

	var resultSlice []*BioassayOntology
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice BioassayOntology")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for bioassay_ontology")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for bioassay_ontology")
	}

	if len(activityAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.BaoEndpointBioassayOntology = foreign
		if foreign.R == nil {
			foreign.R = &bioassayOntologyR{}
		}
		foreign.R.BaoEndpointActivities = append(foreign.R.BaoEndpointActivities, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			// Local FK (bao_endpoint) is matched against the foreign PK (bao_id).
			if queries.Equal(local.BaoEndpoint, foreign.BaoID) {
				local.R.BaoEndpointBioassayOntology = foreign
				if foreign.R == nil {
					foreign.R = &bioassayOntologyR{}
				}
				foreign.R.BaoEndpointActivities = append(foreign.R.BaoEndpointActivities, local)
				break
			}
		}
	}

	return nil
}

// LoadAssay allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (activityL) LoadAssay(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivity interface{}, mods queries.Applicator) error {
	var slice []*Activity
	var object *Activity

	if singular {
		object = maybeActivity.(*Activity)
	} else {
		slice = *maybeActivity.(*[]*Activity)
	}

	// AssayID is non-nullable, so == comparison and no nil checks.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &activityR{}
		}
		args = append(args, object.AssayID)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &activityR{}
			}

			for _, a := range args {
				if a == obj.AssayID {
					continue Outer
				}
			}

			args = append(args, obj.AssayID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`assays`),
		qm.WhereIn(`assays.assay_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load Assay")
	}

	var resultSlice []*Assay
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice Assay")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for assays")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays")
	}

	if len(activityAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.Assay = foreign
		if foreign.R == nil {
			foreign.R = &assayR{}
		}
		foreign.R.Activities = append(foreign.R.Activities, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.AssayID == foreign.AssayID {
				local.R.Assay = foreign
				if foreign.R == nil {
					foreign.R = &assayR{}
				}
				foreign.R.Activities = append(foreign.R.Activities, local)
				break
			}
		}
	}

	return nil
}

// LoadLigandEff allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-1 relationship.
func (activityL) LoadLigandEff(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivity interface{}, mods queries.Applicator) error {
	var slice []*Activity
	var object *Activity

	if singular {
		object = maybeActivity.(*Activity)
	} else {
		slice = *maybeActivity.(*[]*Activity)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &activityR{}
		}
		args = append(args, object.ActivityID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &activityR{}
			}

			for _, a := range args {
				if a == obj.ActivityID {
					continue Outer
				}
			}

			args = append(args, obj.ActivityID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`ligand_eff`),
		qm.WhereIn(`ligand_eff.activity_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load LigandEff")
	}

	var resultSlice []*LigandEff
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice LigandEff")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for ligand_eff")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for ligand_eff")
	}

	if len(activityAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Note: unlike the N-1 loaders above, the singular branch does not
	// return here; slice is nil in that case, so the loop below is a no-op.
	if singular {
		foreign := resultSlice[0]
		object.R.LigandEff = foreign
		if foreign.R == nil {
			foreign.R = &ligandEffR{}
		}
		foreign.R.Activity = object
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.ActivityID == foreign.ActivityID {
				local.R.LigandEff = foreign
				if foreign.R == nil {
					foreign.R = &ligandEffR{}
				}
				foreign.R.Activity = local
				break
			}
		}
	}

	return nil
}

// LoadActivityProperties allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (activityL) LoadActivityProperties(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivity interface{}, mods queries.Applicator) error {
	var slice []*Activity
	var object *Activity

	if singular {
		object = maybeActivity.(*Activity)
	} else {
		slice = *maybeActivity.(*[]*Activity)
	}

	// Collect the distinct parent PKs (activity_id) to load children for.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &activityR{}
		}
		args = append(args, object.ActivityID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &activityR{}
			}

			for _, a := range args {
				if a == obj.ActivityID {
					continue Outer
				}
			}

			args = append(args, obj.ActivityID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`activity_properties`),
		qm.WhereIn(`activity_properties.activity_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load activity_properties")
	}

	var resultSlice []*ActivityProperty
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice activity_properties")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on activity_properties")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activity_properties")
	}

	// After-select hooks here are the child type's, not Activity's.
	if len(activityPropertyAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.ActivityProperties = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &activityPropertyR{}
			}
			foreign.R.Activity = object
		}
		return nil
	}

	// Distribute children to their parents by matching activity_id.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.ActivityID == foreign.ActivityID {
				local.R.ActivityProperties = append(local.R.ActivityProperties, foreign)
				if foreign.R == nil {
					foreign.R = &activityPropertyR{}
				}
				foreign.R.Activity = local
				break
			}
		}
	}

	return nil
}

// LoadActivitySuppMaps allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (activityL) LoadActivitySuppMaps(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivity interface{}, mods queries.Applicator) error {
	var slice []*Activity
	var object *Activity

	if singular {
		object = maybeActivity.(*Activity)
	} else {
		slice = *maybeActivity.(*[]*Activity)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &activityR{}
		}
		args = append(args, object.ActivityID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &activityR{}
			}

			for _, a := range args {
				if a == obj.ActivityID {
					continue Outer
				}
			}

			args = append(args, obj.ActivityID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`activity_supp_map`),
		qm.WhereIn(`activity_supp_map.activity_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load activity_supp_map")
	}

	var resultSlice []*ActivitySuppMap
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice activity_supp_map")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on activity_supp_map")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activity_supp_map")
	}

	if len(activitySuppMapAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.ActivitySuppMaps = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &activitySuppMapR{}
			}
			foreign.R.Activity = object
		}
		return nil
	}

	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.ActivityID == foreign.ActivityID {
				local.R.ActivitySuppMaps = append(local.R.ActivitySuppMaps, foreign)
				if foreign.R == nil {
					foreign.R = &activitySuppMapR{}
				}
				foreign.R.Activity = local
				break
			}
		}
	}

	return nil
}

// LoadPredictedBindingDomains allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (activityL) LoadPredictedBindingDomains(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivity interface{}, mods queries.Applicator) error {
	var slice []*Activity
	var object *Activity

	if singular {
		object = maybeActivity.(*Activity)
	} else {
		slice = *maybeActivity.(*[]*Activity)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &activityR{}
		}
		args = append(args, object.ActivityID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &activityR{}
			}

			// This loader uses queries.Equal (the child FK is nullable),
			// unlike the == comparisons in the two loaders above.
			for _, a := range args {
				if queries.Equal(a, obj.ActivityID) {
					continue Outer
				}
			}

			args = append(args, obj.ActivityID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`predicted_binding_domains`),
		qm.WhereIn(`predicted_binding_domains.activity_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load predicted_binding_domains")
	}

	var resultSlice []*PredictedBindingDomain
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice predicted_binding_domains")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on predicted_binding_domains")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for predicted_binding_domains")
	}

	if len(predictedBindingDomainAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.PredictedBindingDomains = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &predictedBindingDomainR{}
			}
			foreign.R.Activity = object
		}
		return nil
	}

	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.ActivityID, foreign.ActivityID) {
				local.R.PredictedBindingDomains = append(local.R.PredictedBindingDomains, foreign)
				if foreign.R == nil {
					foreign.R = &predictedBindingDomainR{}
				}
				foreign.R.Activity = local
				break
			}
		}
	}

	return nil
}

// SetDataValidityCommentDataValidityLookup of the activity to the related item.
// Sets o.R.DataValidityCommentDataValidityLookup to related.
// Adds o to related.R.DataValidityCommentActivities.
func (o *Activity) SetDataValidityCommentDataValidityLookup(ctx context.Context, exec boil.ContextExecutor, insert bool, related *DataValidityLookup) error {
	var err error
	// Optionally insert the related row first so its key exists.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Persist the FK change on this activity row.
	updateQuery := fmt.Sprintf(
		"UPDATE \"activities\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"data_validity_comment"}),
		strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns),
	)
	values := []interface{}{related.DataValidityComment, o.ActivityID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the change in memory on both sides of the relationship.
	queries.Assign(&o.DataValidityComment, related.DataValidityComment)
	if o.R == nil {
		o.R = &activityR{
			DataValidityCommentDataValidityLookup: related,
		}
	} else {
		o.R.DataValidityCommentDataValidityLookup = related
	}

	if related.R == nil {
		related.R = &dataValidityLookupR{
			DataValidityCommentActivities: ActivitySlice{o},
		}
	} else {
		related.R.DataValidityCommentActivities = append(related.R.DataValidityCommentActivities, o)
	}

	return nil
}

// RemoveDataValidityCommentDataValidityLookup relationship.
// Sets o.R.DataValidityCommentDataValidityLookup to nil.
// Removes o from all passed in related items' relationships struct.
func (o *Activity) RemoveDataValidityCommentDataValidityLookup(ctx context.Context, exec boil.ContextExecutor, related *DataValidityLookup) error {
	var err error

	// Null the FK locally and persist it.
	queries.SetScanner(&o.DataValidityComment, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("data_validity_comment")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.DataValidityCommentDataValidityLookup = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Drop one entry from the related side's cached slice (swap-with-last
	// removal). NOTE(review): the continue/remove condition compares FK
	// values after o's FK was nulled; this matches the upstream SQLBoiler
	// template — verify against the generator before changing.
	for i, ri := range related.R.DataValidityCommentActivities {
		if queries.Equal(o.DataValidityComment, ri.DataValidityComment) {
			continue
		}

		ln := len(related.R.DataValidityCommentActivities)
		if ln > 1 && i < ln-1 {
			related.R.DataValidityCommentActivities[i] = related.R.DataValidityCommentActivities[ln-1]
		}
		related.R.DataValidityCommentActivities = related.R.DataValidityCommentActivities[:ln-1]
		break
	}
	return nil
}

// SetSRC of the activity to the related item.
// Sets o.R.SRC to related.
// Adds o to related.R.SRCActivities.
func (o *Activity) SetSRC(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Source) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"activities\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"src_id"}),
		strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns),
	)
	values := []interface{}{related.SRCID, o.ActivityID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	queries.Assign(&o.SRCID, related.SRCID)
	if o.R == nil {
		o.R = &activityR{
			SRC: related,
		}
	} else {
		o.R.SRC = related
	}

	if related.R == nil {
		related.R = &sourceR{
			SRCActivities: ActivitySlice{o},
		}
	} else {
		related.R.SRCActivities = append(related.R.SRCActivities, o)
	}

	return nil
}

// RemoveSRC relationship.
// Sets o.R.SRC to nil.
// Removes o from all passed in related items' relationships struct.
func (o *Activity) RemoveSRC(ctx context.Context, exec boil.ContextExecutor, related *Source) error {
	var err error

	// Null the FK locally and persist it.
	queries.SetScanner(&o.SRCID, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("src_id")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.SRC = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Swap-with-last removal from the related side's cached slice.
	// NOTE(review): condition matches the upstream SQLBoiler template;
	// verify against the generator before changing.
	for i, ri := range related.R.SRCActivities {
		if queries.Equal(o.SRCID, ri.SRCID) {
			continue
		}

		ln := len(related.R.SRCActivities)
		if ln > 1 && i < ln-1 {
			related.R.SRCActivities[i] = related.R.SRCActivities[ln-1]
		}
		related.R.SRCActivities = related.R.SRCActivities[:ln-1]
		break
	}
	return nil
}

// SetRecord of the activity to the related item.
// Sets o.R.Record to related.
// Adds o to related.R.RecordActivities.
func (o *Activity) SetRecord(ctx context.Context, exec boil.ContextExecutor, insert bool, related *CompoundRecord) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"activities\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"record_id"}),
		strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns),
	)
	values := []interface{}{related.RecordID, o.ActivityID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// record_id is non-nullable: plain assignment, no queries.Assign.
	o.RecordID = related.RecordID
	if o.R == nil {
		o.R = &activityR{
			Record: related,
		}
	} else {
		o.R.Record = related
	}

	if related.R == nil {
		related.R = &compoundRecordR{
			RecordActivities: ActivitySlice{o},
		}
	} else {
		related.R.RecordActivities = append(related.R.RecordActivities, o)
	}

	return nil
}

// SetMolregnoMoleculeDictionary of the activity to the related item.
// Sets o.R.MolregnoMoleculeDictionary to related.
// Adds o to related.R.MolregnoActivities.
func (o *Activity) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"activities\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}),
		strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns),
	)
	values := []interface{}{related.Molregno, o.ActivityID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// molregno is nullable: assign through queries.Assign.
	queries.Assign(&o.Molregno, related.Molregno)
	if o.R == nil {
		o.R = &activityR{
			MolregnoMoleculeDictionary: related,
		}
	} else {
		o.R.MolregnoMoleculeDictionary = related
	}

	if related.R == nil {
		related.R = &moleculeDictionaryR{
			MolregnoActivities: ActivitySlice{o},
		}
	} else {
		related.R.MolregnoActivities = append(related.R.MolregnoActivities, o)
	}

	return nil
}

// RemoveMolregnoMoleculeDictionary relationship.
// Sets o.R.MolregnoMoleculeDictionary to nil.
// Removes o from all passed in related items' relationships struct.
func (o *Activity) RemoveMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, related *MoleculeDictionary) error {
	var err error

	// Null out the FK locally, then persist only that column.
	queries.SetScanner(&o.Molregno, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("molregno")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.MolregnoMoleculeDictionary = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Swap-with-last removal from the back-reference slice (order not kept).
	// NOTE(review): o.Molregno is nil here, so the Equal check skips nil-FK
	// entries rather than matching o by identity — upstream generated code.
	for i, ri := range related.R.MolregnoActivities {
		if queries.Equal(o.Molregno, ri.Molregno) {
			continue
		}

		ln := len(related.R.MolregnoActivities)
		if ln > 1 && i < ln-1 {
			related.R.MolregnoActivities[i] = related.R.MolregnoActivities[ln-1]
		}
		related.R.MolregnoActivities = related.R.MolregnoActivities[:ln-1]
		break
	}
	return nil
}

// SetDoc of the activity to the related item.
// Sets o.R.Doc to related.
// Adds o to related.R.Activities.
func (o *Activity) SetDoc(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Doc) error {
	var err error
	if insert {
		// Optionally persist the foreign Doc first.
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"activities\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"doc_id"}),
		strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns),
	)
	values := []interface{}{related.DocID, o.ActivityID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Null-aware assignment: doc_id is a nullable FK.
	queries.Assign(&o.DocID, related.DocID)
	if o.R == nil {
		o.R = &activityR{
			Doc: related,
		}
	} else {
		o.R.Doc = related
	}

	if related.R == nil {
		related.R = &docR{
			Activities: ActivitySlice{o},
		}
	} else {
		related.R.Activities = append(related.R.Activities, o)
	}

	return nil
}

// RemoveDoc relationship.
// Sets o.R.Doc to nil.
// Removes o from all passed in related items' relationships struct.
func (o *Activity) RemoveDoc(ctx context.Context, exec boil.ContextExecutor, related *Doc) error {
	var err error

	// Null out the FK locally, then persist only that column.
	queries.SetScanner(&o.DocID, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("doc_id")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.Doc = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Swap-with-last removal from the back-reference slice (order not kept).
	for i, ri := range related.R.Activities {
		if queries.Equal(o.DocID, ri.DocID) {
			continue
		}

		ln := len(related.R.Activities)
		if ln > 1 && i < ln-1 {
			related.R.Activities[i] = related.R.Activities[ln-1]
		}
		related.R.Activities = related.R.Activities[:ln-1]
		break
	}
	return nil
}

// SetBaoEndpointBioassayOntology of the activity to the related item.
// Sets o.R.BaoEndpointBioassayOntology to related.
// Adds o to related.R.BaoEndpointActivities.
func (o *Activity) SetBaoEndpointBioassayOntology(ctx context.Context, exec boil.ContextExecutor, insert bool, related *BioassayOntology) error {
	var err error
	if insert {
		// Optionally persist the foreign BioassayOntology first.
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Point activities.bao_endpoint at the related ontology row (its bao_id).
	updateQuery := fmt.Sprintf(
		"UPDATE \"activities\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"bao_endpoint"}),
		strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns),
	)
	values := []interface{}{related.BaoID, o.ActivityID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Null-aware assignment: bao_endpoint is a nullable FK.
	queries.Assign(&o.BaoEndpoint, related.BaoID)
	if o.R == nil {
		o.R = &activityR{
			BaoEndpointBioassayOntology: related,
		}
	} else {
		o.R.BaoEndpointBioassayOntology = related
	}

	if related.R == nil {
		related.R = &bioassayOntologyR{
			BaoEndpointActivities: ActivitySlice{o},
		}
	} else {
		related.R.BaoEndpointActivities = append(related.R.BaoEndpointActivities, o)
	}

	return nil
}

// RemoveBaoEndpointBioassayOntology relationship.
// Sets o.R.BaoEndpointBioassayOntology to nil.
// Removes o from all passed in related items' relationships struct.
func (o *Activity) RemoveBaoEndpointBioassayOntology(ctx context.Context, exec boil.ContextExecutor, related *BioassayOntology) error {
	var err error

	// Null out the FK locally, then persist only that column.
	queries.SetScanner(&o.BaoEndpoint, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("bao_endpoint")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.BaoEndpointBioassayOntology = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Swap-with-last removal from the back-reference slice (order not kept).
	for i, ri := range related.R.BaoEndpointActivities {
		if queries.Equal(o.BaoEndpoint, ri.BaoEndpoint) {
			continue
		}

		ln := len(related.R.BaoEndpointActivities)
		if ln > 1 && i < ln-1 {
			related.R.BaoEndpointActivities[i] = related.R.BaoEndpointActivities[ln-1]
		}
		related.R.BaoEndpointActivities = related.R.BaoEndpointActivities[:ln-1]
		break
	}
	return nil
}

// SetAssay of the activity to the related item.
// Sets o.R.Assay to related.
// Adds o to related.R.Activities.
func (o *Activity) SetAssay(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Assay) error {
	var err error
	if insert {
		// Optionally persist the foreign Assay first.
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"activities\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"assay_id"}),
		strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns),
	)
	values := []interface{}{related.AssayID, o.ActivityID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// assay_id uses plain assignment (no null scanner): non-nullable FK.
	o.AssayID = related.AssayID
	if o.R == nil {
		o.R = &activityR{
			Assay: related,
		}
	} else {
		o.R.Assay = related
	}

	if related.R == nil {
		related.R = &assayR{
			Activities: ActivitySlice{o},
		}
	} else {
		related.R.Activities = append(related.R.Activities, o)
	}

	return nil
}

// SetLigandEff of the activity to the related item.
// Sets o.R.LigandEff to related.
// Adds o to related.R.Activity.
func (o *Activity) SetLigandEff(ctx context.Context, exec boil.ContextExecutor, insert bool, related *LigandEff) error {
	var err error

	// One-to-one: the FK lives on ligand_eff (activity_id), so the foreign
	// row is the one written, not activities.
	if insert {
		related.ActivityID = o.ActivityID

		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	} else {
		updateQuery := fmt.Sprintf(
			"UPDATE \"ligand_eff\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, []string{"activity_id"}),
			strmangle.WhereClause("\"", "\"", 0, ligandEffPrimaryKeyColumns),
		)
		values := []interface{}{o.ActivityID, related.ActivityID}

		if boil.IsDebug(ctx) {
			writer := boil.DebugWriterFrom(ctx)
			fmt.Fprintln(writer, updateQuery)
			fmt.Fprintln(writer, values)
		}
		if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
			return errors.Wrap(err, "failed to update foreign table")
		}

		related.ActivityID = o.ActivityID
	}

	// Wire both in-memory relation structs.
	if o.R == nil {
		o.R = &activityR{
			LigandEff: related,
		}
	} else {
		o.R.LigandEff = related
	}

	if related.R == nil {
		related.R = &ligandEffR{
			Activity: o,
		}
	} else {
		related.R.Activity = o
	}
	return nil
}

// AddActivityProperties adds the given related objects to the existing relationships
// of the activity, optionally inserting them as new records.
// Appends related to o.R.ActivityProperties.
// Sets related.R.Activity appropriately.
func (o *Activity) AddActivityProperties(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ActivityProperty) error {
	var err error
	for _, rel := range related {
		if insert {
			// New child rows: stamp the FK, then insert.
			rel.ActivityID = o.ActivityID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing rows: re-point their activity_id at o (keyed by ap_id).
			updateQuery := fmt.Sprintf(
				"UPDATE \"activity_properties\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"activity_id"}),
				strmangle.WhereClause("\"", "\"", 0, activityPropertyPrimaryKeyColumns),
			)
			values := []interface{}{o.ActivityID, rel.ApID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.ActivityID = o.ActivityID
		}
	}

	// Append forward references on o.
	if o.R == nil {
		o.R = &activityR{
			ActivityProperties: related,
		}
	} else {
		o.R.ActivityProperties = append(o.R.ActivityProperties, related...)
	}

	// Set the back-reference on every child.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &activityPropertyR{
				Activity: o,
			}
		} else {
			rel.R.Activity = o
		}
	}
	return nil
}

// AddActivitySuppMaps adds the given related objects to the existing relationships
// of the activity, optionally inserting them as new records.
// Appends related to o.R.ActivitySuppMaps.
// Sets related.R.Activity appropriately.
func (o *Activity) AddActivitySuppMaps(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ActivitySuppMap) error {
	var err error
	for _, rel := range related {
		if insert {
			// New child rows: stamp the FK, then insert.
			rel.ActivityID = o.ActivityID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing rows: re-point their activity_id at o (keyed by actsm_id).
			updateQuery := fmt.Sprintf(
				"UPDATE \"activity_supp_map\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"activity_id"}),
				strmangle.WhereClause("\"", "\"", 0, activitySuppMapPrimaryKeyColumns),
			)
			values := []interface{}{o.ActivityID, rel.ActsmID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.ActivityID = o.ActivityID
		}
	}

	// Append forward references on o.
	if o.R == nil {
		o.R = &activityR{
			ActivitySuppMaps: related,
		}
	} else {
		o.R.ActivitySuppMaps = append(o.R.ActivitySuppMaps, related...)
	}

	// Set the back-reference on every child.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &activitySuppMapR{
				Activity: o,
			}
		} else {
			rel.R.Activity = o
		}
	}
	return nil
}

// AddPredictedBindingDomains adds the given related objects to the existing relationships
// of the activity, optionally inserting them as new records.
// Appends related to o.R.PredictedBindingDomains.
// Sets related.R.Activity appropriately.
func (o *Activity) AddPredictedBindingDomains(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*PredictedBindingDomain) error {
	var err error
	for _, rel := range related {
		if insert {
			// Nullable FK on the child, so the null-aware Assign is used
			// (contrast with the plain assignment in AddActivityProperties).
			queries.Assign(&rel.ActivityID, o.ActivityID)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing rows: re-point activity_id at o (keyed by predbind_id).
			updateQuery := fmt.Sprintf(
				"UPDATE \"predicted_binding_domains\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"activity_id"}),
				strmangle.WhereClause("\"", "\"", 0, predictedBindingDomainPrimaryKeyColumns),
			)
			values := []interface{}{o.ActivityID, rel.PredbindID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.ActivityID, o.ActivityID)
		}
	}

	// Append forward references on o.
	if o.R == nil {
		o.R = &activityR{
			PredictedBindingDomains: related,
		}
	} else {
		o.R.PredictedBindingDomains = append(o.R.PredictedBindingDomains, related...)
	}

	// Set the back-reference on every child.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &predictedBindingDomainR{
				Activity: o,
			}
		} else {
			rel.R.Activity = o
		}
	}
	return nil
}

// SetPredictedBindingDomains removes all previously related items of the
// activity replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.Activity's PredictedBindingDomains accordingly.
// Replaces o.R.PredictedBindingDomains with related.
// Sets related.R.Activity's PredictedBindingDomains accordingly.
func (o *Activity) SetPredictedBindingDomains(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*PredictedBindingDomain) error {
	// Detach every current child in the database in one statement.
	query := "update \"predicted_binding_domains\" set \"activity_id\" = null where \"activity_id\" = ?"
	values := []interface{}{o.ActivityID}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	// Detach the in-memory children as well.
	if o.R != nil {
		for _, rel := range o.R.PredictedBindingDomains {
			queries.SetScanner(&rel.ActivityID, nil)
			if rel.R == nil {
				continue
			}

			rel.R.Activity = nil
		}
		o.R.PredictedBindingDomains = nil
	}

	// Re-attach the replacement set via the Add helper.
	return o.AddPredictedBindingDomains(ctx, exec, insert, related...)
}

// RemovePredictedBindingDomains relationships from objects passed in.
// Removes related items from R.PredictedBindingDomains (uses pointer comparison, removal does not keep order)
// Sets related.R.Activity.
func (o *Activity) RemovePredictedBindingDomains(ctx context.Context, exec boil.ContextExecutor, related ...*PredictedBindingDomain) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		// Null the child's FK and persist only that column.
		queries.SetScanner(&rel.ActivityID, nil)
		if rel.R != nil {
			rel.R.Activity = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("activity_id")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	// Remove each passed-in child from o's slice by pointer identity
	// (swap-with-last; order is not preserved).
	for _, rel := range related {
		for i, ri := range o.R.PredictedBindingDomains {
			if rel != ri {
				continue
			}

			ln := len(o.R.PredictedBindingDomains)
			if ln > 1 && i < ln-1 {
				o.R.PredictedBindingDomains[i] = o.R.PredictedBindingDomains[ln-1]
			}
			o.R.PredictedBindingDomains = o.R.PredictedBindingDomains[:ln-1]
			break
		}
	}

	return nil
}

// Activities retrieves all the records using an executor.
func Activities(mods ...qm.QueryMod) activityQuery {
	mods = append(mods, qm.From("\"activities\""))
	q := NewQuery(mods...)
	// Default to selecting every column of the table unless the caller
	// supplied an explicit qm.Select.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"activities\".*"})
	}

	return activityQuery{q}
}

// FindActivity retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindActivity(ctx context.Context, exec boil.ContextExecutor, activityID int64, selectCols ...string) (*Activity, error) {
	activityObj := &Activity{}

	sel := "*"
	if len(selectCols) > 0 {
		// Quote each requested column with the dialect's identifier quotes.
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"activities\" where \"activity_id\"=?", sel,
	)

	q := queries.Raw(query, activityID)

	err := q.Bind(ctx, exec, activityObj)
	if err != nil {
		// Pass sql.ErrNoRows through unwrapped so callers can test for it.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from activities")
	}

	if err = activityObj.doAfterSelectHooks(ctx, exec); err != nil {
		return activityObj, err
	}

	return activityObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *Activity) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no activities provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	// Columns with DB defaults that hold non-zero values must be inserted
	// explicitly; they also key the statement cache below.
	nzDefaults := queries.NonZeroDefaultSet(activityColumnsWithDefault, o)

	key := makeCacheKey(columns, nzDefaults)
	activityInsertCacheMut.RLock()
	cache, cached := activityInsertCache[key]
	activityInsertCacheMut.RUnlock()

	if !cached {
		// Build the INSERT statement and value/return mappings once per key.
		wl, returnColumns := columns.InsertColumnSet(
			activityAllColumns,
			activityColumnsWithDefault,
			activityColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(activityType, activityMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(activityType, activityMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"activities\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"activities\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	// Use QueryRow + Scan only when RETURNING columns must be read back.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into activities")
	}

	if !cached {
		activityInsertCacheMut.Lock()
		activityInsertCache[key] = cache
		activityInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the Activity.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *Activity) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	activityUpdateCacheMut.RLock()
	cache, cached := activityUpdateCache[key]
	activityUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			activityAllColumns,
			activityPrimaryKeyColumns,
		)

		// created_at is never updated unless the caller whitelists it.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update activities, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"activities\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns),
		)
		// Primary key values are appended after the SET values for the WHERE.
		cache.valueMapping, err = queries.BindMapping(activityType, activityMapping, append(wl, activityPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update activities row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for activities")
	}

	if !cached {
		activityUpdateCacheMut.Lock()
		activityUpdateCache[key] = cache
		activityUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q activityQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for activities")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for activities")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o ActivitySlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	// SET arguments come first, in map-iteration order (nondeterministic but
	// consistent between colNames and args).
	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activityPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"activities\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activityPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in activity slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all activity")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *Activity) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no activities provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(activityColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	activityUpsertCacheMut.RLock()
	cache, cached := activityUpsertCache[key]
	activityUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		// Build the upsert statement and mappings once per key.
		insert, ret := insertColumns.InsertColumnSet(
			activityAllColumns,
			activityColumnsWithDefault,
			activityColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			activityAllColumns,
			activityPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert activities, could not build update column list")
		}

		// Conflict target defaults to the primary key.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(activityPrimaryKeyColumns))
			copy(conflict, activityPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"activities\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(activityType, activityMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(activityType, activityMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert activities")
	}

	if !cached {
		activityUpsertCacheMut.Lock()
		activityUpsertCache[key] = cache
		activityUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single Activity record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *Activity) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no Activity provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), activityPrimaryKeyMapping)
	sql := "DELETE FROM \"activities\" WHERE \"activity_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from activities")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for activities")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q activityQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no activityQuery provided for delete all")
	}

	// Convert the built query into a DELETE and execute it.
	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from activities")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for activities")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o ActivitySlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Run before-delete hooks only when any are registered.
	if len(activityBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// Collect every row's primary key values for the repeated WHERE clause.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activityPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"activities\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activityPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from activity slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for activities") + } + + if len(activityAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *Activity) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindActivity(ctx, exec, o.ActivityID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *ActivitySlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ActivitySlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activityPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"activities\".* FROM \"activities\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activityPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ActivitySlice") + } + + *o = slice + + return nil +} + +// ActivityExists checks if the Activity row exists. +func ActivityExists(ctx context.Context, exec boil.ContextExecutor, activityID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"activities\" where \"activity_id\"=? 
limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, activityID) + } + row := exec.QueryRowContext(ctx, sql, activityID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if activities exists") + } + + return exists, nil +} diff --git a/models/activity_properties.go b/models/activity_properties.go new file mode 100644 index 0000000..0e3e09a --- /dev/null +++ b/models/activity_properties.go @@ -0,0 +1,1171 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/sqlboiler/v4/types" + "github.com/volatiletech/strmangle" +) + +// ActivityProperty is an object representing the database table. 
+type ActivityProperty struct { + ApID int64 `boil:"ap_id" json:"ap_id" toml:"ap_id" yaml:"ap_id"` + ActivityID int64 `boil:"activity_id" json:"activity_id" toml:"activity_id" yaml:"activity_id"` + Type string `boil:"type" json:"type" toml:"type" yaml:"type"` + Relation null.String `boil:"relation" json:"relation,omitempty" toml:"relation" yaml:"relation,omitempty"` + Value types.NullDecimal `boil:"value" json:"value,omitempty" toml:"value" yaml:"value,omitempty"` + Units null.String `boil:"units" json:"units,omitempty" toml:"units" yaml:"units,omitempty"` + TextValue null.String `boil:"text_value" json:"text_value,omitempty" toml:"text_value" yaml:"text_value,omitempty"` + StandardType null.String `boil:"standard_type" json:"standard_type,omitempty" toml:"standard_type" yaml:"standard_type,omitempty"` + StandardRelation null.String `boil:"standard_relation" json:"standard_relation,omitempty" toml:"standard_relation" yaml:"standard_relation,omitempty"` + StandardValue types.NullDecimal `boil:"standard_value" json:"standard_value,omitempty" toml:"standard_value" yaml:"standard_value,omitempty"` + StandardUnits null.String `boil:"standard_units" json:"standard_units,omitempty" toml:"standard_units" yaml:"standard_units,omitempty"` + StandardTextValue null.String `boil:"standard_text_value" json:"standard_text_value,omitempty" toml:"standard_text_value" yaml:"standard_text_value,omitempty"` + Comments null.String `boil:"comments" json:"comments,omitempty" toml:"comments" yaml:"comments,omitempty"` + ResultFlag int16 `boil:"result_flag" json:"result_flag" toml:"result_flag" yaml:"result_flag"` + + R *activityPropertyR `boil:"-" json:"-" toml:"-" yaml:"-"` + L activityPropertyL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ActivityPropertyColumns = struct { + ApID string + ActivityID string + Type string + Relation string + Value string + Units string + TextValue string + StandardType string + StandardRelation string + StandardValue string + StandardUnits string + 
StandardTextValue string + Comments string + ResultFlag string +}{ + ApID: "ap_id", + ActivityID: "activity_id", + Type: "type", + Relation: "relation", + Value: "value", + Units: "units", + TextValue: "text_value", + StandardType: "standard_type", + StandardRelation: "standard_relation", + StandardValue: "standard_value", + StandardUnits: "standard_units", + StandardTextValue: "standard_text_value", + Comments: "comments", + ResultFlag: "result_flag", +} + +var ActivityPropertyTableColumns = struct { + ApID string + ActivityID string + Type string + Relation string + Value string + Units string + TextValue string + StandardType string + StandardRelation string + StandardValue string + StandardUnits string + StandardTextValue string + Comments string + ResultFlag string +}{ + ApID: "activity_properties.ap_id", + ActivityID: "activity_properties.activity_id", + Type: "activity_properties.type", + Relation: "activity_properties.relation", + Value: "activity_properties.value", + Units: "activity_properties.units", + TextValue: "activity_properties.text_value", + StandardType: "activity_properties.standard_type", + StandardRelation: "activity_properties.standard_relation", + StandardValue: "activity_properties.standard_value", + StandardUnits: "activity_properties.standard_units", + StandardTextValue: "activity_properties.standard_text_value", + Comments: "activity_properties.comments", + ResultFlag: "activity_properties.result_flag", +} + +// Generated where + +type whereHelperint16 struct{ field string } + +func (w whereHelperint16) EQ(x int16) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.EQ, x) } +func (w whereHelperint16) NEQ(x int16) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.NEQ, x) } +func (w whereHelperint16) LT(x int16) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.LT, x) } +func (w whereHelperint16) LTE(x int16) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.LTE, x) } +func (w whereHelperint16) GT(x int16) qm.QueryMod { 
return qmhelper.Where(w.field, qmhelper.GT, x) } +func (w whereHelperint16) GTE(x int16) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.GTE, x) } +func (w whereHelperint16) IN(slice []int16) qm.QueryMod { + values := make([]interface{}, 0, len(slice)) + for _, value := range slice { + values = append(values, value) + } + return qm.WhereIn(fmt.Sprintf("%s IN ?", w.field), values...) +} +func (w whereHelperint16) NIN(slice []int16) qm.QueryMod { + values := make([]interface{}, 0, len(slice)) + for _, value := range slice { + values = append(values, value) + } + return qm.WhereNotIn(fmt.Sprintf("%s NOT IN ?", w.field), values...) +} + +var ActivityPropertyWhere = struct { + ApID whereHelperint64 + ActivityID whereHelperint64 + Type whereHelperstring + Relation whereHelpernull_String + Value whereHelpertypes_NullDecimal + Units whereHelpernull_String + TextValue whereHelpernull_String + StandardType whereHelpernull_String + StandardRelation whereHelpernull_String + StandardValue whereHelpertypes_NullDecimal + StandardUnits whereHelpernull_String + StandardTextValue whereHelpernull_String + Comments whereHelpernull_String + ResultFlag whereHelperint16 +}{ + ApID: whereHelperint64{field: "\"activity_properties\".\"ap_id\""}, + ActivityID: whereHelperint64{field: "\"activity_properties\".\"activity_id\""}, + Type: whereHelperstring{field: "\"activity_properties\".\"type\""}, + Relation: whereHelpernull_String{field: "\"activity_properties\".\"relation\""}, + Value: whereHelpertypes_NullDecimal{field: "\"activity_properties\".\"value\""}, + Units: whereHelpernull_String{field: "\"activity_properties\".\"units\""}, + TextValue: whereHelpernull_String{field: "\"activity_properties\".\"text_value\""}, + StandardType: whereHelpernull_String{field: "\"activity_properties\".\"standard_type\""}, + StandardRelation: whereHelpernull_String{field: "\"activity_properties\".\"standard_relation\""}, + StandardValue: whereHelpertypes_NullDecimal{field: 
"\"activity_properties\".\"standard_value\""}, + StandardUnits: whereHelpernull_String{field: "\"activity_properties\".\"standard_units\""}, + StandardTextValue: whereHelpernull_String{field: "\"activity_properties\".\"standard_text_value\""}, + Comments: whereHelpernull_String{field: "\"activity_properties\".\"comments\""}, + ResultFlag: whereHelperint16{field: "\"activity_properties\".\"result_flag\""}, +} + +// ActivityPropertyRels is where relationship names are stored. +var ActivityPropertyRels = struct { + Activity string +}{ + Activity: "Activity", +} + +// activityPropertyR is where relationships are stored. +type activityPropertyR struct { + Activity *Activity `boil:"Activity" json:"Activity" toml:"Activity" yaml:"Activity"` +} + +// NewStruct creates a new relationship struct +func (*activityPropertyR) NewStruct() *activityPropertyR { + return &activityPropertyR{} +} + +func (r *activityPropertyR) GetActivity() *Activity { + if r == nil { + return nil + } + return r.Activity +} + +// activityPropertyL is where Load methods for each relationship are stored. +type activityPropertyL struct{} + +var ( + activityPropertyAllColumns = []string{"ap_id", "activity_id", "type", "relation", "value", "units", "text_value", "standard_type", "standard_relation", "standard_value", "standard_units", "standard_text_value", "comments", "result_flag"} + activityPropertyColumnsWithoutDefault = []string{"ap_id", "activity_id", "type", "result_flag"} + activityPropertyColumnsWithDefault = []string{"relation", "value", "units", "text_value", "standard_type", "standard_relation", "standard_value", "standard_units", "standard_text_value", "comments"} + activityPropertyPrimaryKeyColumns = []string{"ap_id"} + activityPropertyGeneratedColumns = []string{} +) + +type ( + // ActivityPropertySlice is an alias for a slice of pointers to ActivityProperty. + // This should almost always be used instead of []ActivityProperty. 
+ ActivityPropertySlice []*ActivityProperty + // ActivityPropertyHook is the signature for custom ActivityProperty hook methods + ActivityPropertyHook func(context.Context, boil.ContextExecutor, *ActivityProperty) error + + activityPropertyQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + activityPropertyType = reflect.TypeOf(&ActivityProperty{}) + activityPropertyMapping = queries.MakeStructMapping(activityPropertyType) + activityPropertyPrimaryKeyMapping, _ = queries.BindMapping(activityPropertyType, activityPropertyMapping, activityPropertyPrimaryKeyColumns) + activityPropertyInsertCacheMut sync.RWMutex + activityPropertyInsertCache = make(map[string]insertCache) + activityPropertyUpdateCacheMut sync.RWMutex + activityPropertyUpdateCache = make(map[string]updateCache) + activityPropertyUpsertCacheMut sync.RWMutex + activityPropertyUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var activityPropertyAfterSelectHooks []ActivityPropertyHook + +var activityPropertyBeforeInsertHooks []ActivityPropertyHook +var activityPropertyAfterInsertHooks []ActivityPropertyHook + +var activityPropertyBeforeUpdateHooks []ActivityPropertyHook +var activityPropertyAfterUpdateHooks []ActivityPropertyHook + +var activityPropertyBeforeDeleteHooks []ActivityPropertyHook +var activityPropertyAfterDeleteHooks []ActivityPropertyHook + +var activityPropertyBeforeUpsertHooks []ActivityPropertyHook +var activityPropertyAfterUpsertHooks []ActivityPropertyHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *ActivityProperty) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activityPropertyAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *ActivityProperty) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activityPropertyBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *ActivityProperty) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activityPropertyAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *ActivityProperty) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activityPropertyBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ActivityProperty) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activityPropertyAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *ActivityProperty) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activityPropertyBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *ActivityProperty) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activityPropertyAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ActivityProperty) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activityPropertyBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *ActivityProperty) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activityPropertyAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddActivityPropertyHook registers your hook function for all future operations. 
+func AddActivityPropertyHook(hookPoint boil.HookPoint, activityPropertyHook ActivityPropertyHook) { + switch hookPoint { + case boil.AfterSelectHook: + activityPropertyAfterSelectHooks = append(activityPropertyAfterSelectHooks, activityPropertyHook) + case boil.BeforeInsertHook: + activityPropertyBeforeInsertHooks = append(activityPropertyBeforeInsertHooks, activityPropertyHook) + case boil.AfterInsertHook: + activityPropertyAfterInsertHooks = append(activityPropertyAfterInsertHooks, activityPropertyHook) + case boil.BeforeUpdateHook: + activityPropertyBeforeUpdateHooks = append(activityPropertyBeforeUpdateHooks, activityPropertyHook) + case boil.AfterUpdateHook: + activityPropertyAfterUpdateHooks = append(activityPropertyAfterUpdateHooks, activityPropertyHook) + case boil.BeforeDeleteHook: + activityPropertyBeforeDeleteHooks = append(activityPropertyBeforeDeleteHooks, activityPropertyHook) + case boil.AfterDeleteHook: + activityPropertyAfterDeleteHooks = append(activityPropertyAfterDeleteHooks, activityPropertyHook) + case boil.BeforeUpsertHook: + activityPropertyBeforeUpsertHooks = append(activityPropertyBeforeUpsertHooks, activityPropertyHook) + case boil.AfterUpsertHook: + activityPropertyAfterUpsertHooks = append(activityPropertyAfterUpsertHooks, activityPropertyHook) + } +} + +// One returns a single activityProperty record from the query. +func (q activityPropertyQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ActivityProperty, error) { + o := &ActivityProperty{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for activity_properties") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ActivityProperty records from the query. 
+func (q activityPropertyQuery) All(ctx context.Context, exec boil.ContextExecutor) (ActivityPropertySlice, error) { + var o []*ActivityProperty + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ActivityProperty slice") + } + + if len(activityPropertyAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ActivityProperty records in the query. +func (q activityPropertyQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count activity_properties rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q activityPropertyQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if activity_properties exists") + } + + return count > 0, nil +} + +// Activity pointed to by the foreign key. +func (o *ActivityProperty) Activity(mods ...qm.QueryMod) activityQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"activity_id\" = ?", o.ActivityID), + } + + queryMods = append(queryMods, mods...) + + return Activities(queryMods...) +} + +// LoadActivity allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (activityPropertyL) LoadActivity(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivityProperty interface{}, mods queries.Applicator) error { + var slice []*ActivityProperty + var object *ActivityProperty + + if singular { + object = maybeActivityProperty.(*ActivityProperty) + } else { + slice = *maybeActivityProperty.(*[]*ActivityProperty) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &activityPropertyR{} + } + args = append(args, object.ActivityID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &activityPropertyR{} + } + + for _, a := range args { + if a == obj.ActivityID { + continue Outer + } + } + + args = append(args, obj.ActivityID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`activities`), + qm.WhereIn(`activities.activity_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Activity") + } + + var resultSlice []*Activity + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Activity") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for activities") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activities") + } + + if len(activityPropertyAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Activity = foreign + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.ActivityProperties = append(foreign.R.ActivityProperties, object) + return nil + } + + for _, local := range 
slice { + for _, foreign := range resultSlice { + if local.ActivityID == foreign.ActivityID { + local.R.Activity = foreign + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.ActivityProperties = append(foreign.R.ActivityProperties, local) + break + } + } + } + + return nil +} + +// SetActivity of the activityProperty to the related item. +// Sets o.R.Activity to related. +// Adds o to related.R.ActivityProperties. +func (o *ActivityProperty) SetActivity(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Activity) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"activity_properties\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"activity_id"}), + strmangle.WhereClause("\"", "\"", 0, activityPropertyPrimaryKeyColumns), + ) + values := []interface{}{related.ActivityID, o.ApID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.ActivityID = related.ActivityID + if o.R == nil { + o.R = &activityPropertyR{ + Activity: related, + } + } else { + o.R.Activity = related + } + + if related.R == nil { + related.R = &activityR{ + ActivityProperties: ActivityPropertySlice{o}, + } + } else { + related.R.ActivityProperties = append(related.R.ActivityProperties, o) + } + + return nil +} + +// ActivityProperties retrieves all the records using an executor. +func ActivityProperties(mods ...qm.QueryMod) activityPropertyQuery { + mods = append(mods, qm.From("\"activity_properties\"")) + q := NewQuery(mods...) 
+ if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"activity_properties\".*"}) + } + + return activityPropertyQuery{q} +} + +// FindActivityProperty retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindActivityProperty(ctx context.Context, exec boil.ContextExecutor, apID int64, selectCols ...string) (*ActivityProperty, error) { + activityPropertyObj := &ActivityProperty{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"activity_properties\" where \"ap_id\"=?", sel, + ) + + q := queries.Raw(query, apID) + + err := q.Bind(ctx, exec, activityPropertyObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from activity_properties") + } + + if err = activityPropertyObj.doAfterSelectHooks(ctx, exec); err != nil { + return activityPropertyObj, err + } + + return activityPropertyObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *ActivityProperty) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no activity_properties provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(activityPropertyColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + activityPropertyInsertCacheMut.RLock() + cache, cached := activityPropertyInsertCache[key] + activityPropertyInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + activityPropertyAllColumns, + activityPropertyColumnsWithDefault, + activityPropertyColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(activityPropertyType, activityPropertyMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(activityPropertyType, activityPropertyMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"activity_properties\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"activity_properties\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) 
+ } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into activity_properties") + } + + if !cached { + activityPropertyInsertCacheMut.Lock() + activityPropertyInsertCache[key] = cache + activityPropertyInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ActivityProperty. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *ActivityProperty) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + activityPropertyUpdateCacheMut.RLock() + cache, cached := activityPropertyUpdateCache[key] + activityPropertyUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + activityPropertyAllColumns, + activityPropertyPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update activity_properties, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"activity_properties\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, activityPropertyPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(activityPropertyType, activityPropertyMapping, append(wl, activityPropertyPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + 
var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update activity_properties row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for activity_properties") + } + + if !cached { + activityPropertyUpdateCacheMut.Lock() + activityPropertyUpdateCache[key] = cache + activityPropertyUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q activityPropertyQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for activity_properties") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for activity_properties") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ActivityPropertySlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activityPropertyPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"activity_properties\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activityPropertyPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in activityProperty slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all activityProperty") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *ActivityProperty) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no activity_properties provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(activityPropertyColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + 
strmangle.PutBuffer(buf) + + activityPropertyUpsertCacheMut.RLock() + cache, cached := activityPropertyUpsertCache[key] + activityPropertyUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + activityPropertyAllColumns, + activityPropertyColumnsWithDefault, + activityPropertyColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + activityPropertyAllColumns, + activityPropertyPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert activity_properties, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(activityPropertyPrimaryKeyColumns)) + copy(conflict, activityPropertyPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"activity_properties\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(activityPropertyType, activityPropertyMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(activityPropertyType, activityPropertyMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert activity_properties") + } + + if !cached { + activityPropertyUpsertCacheMut.Lock() + activityPropertyUpsertCache[key] = cache + activityPropertyUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ActivityProperty record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *ActivityProperty) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ActivityProperty provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), activityPropertyPrimaryKeyMapping) + sql := "DELETE FROM \"activity_properties\" WHERE \"ap_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from activity_properties") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for activity_properties") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
func (q activityPropertyQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no activityPropertyQuery provided for delete all")
	}

	// Rewrite the built query from SELECT form into DELETE form before executing.
	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from activity_properties")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for activity_properties")
	}

	// NOTE(review): unlike the slice variant below, this query-based delete does
	// not run before/after delete hooks for the affected rows.
	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o ActivityPropertySlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	if len(activityPropertyBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// Gather every row's primary key values so a single DELETE with a repeated
	// WHERE clause (one group of placeholders per row) can be issued.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activityPropertyPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"activity_properties\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activityPropertyPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from activityProperty slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for activity_properties")
	}

	if len(activityPropertyAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *ActivityProperty) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindActivityProperty(ctx, exec, o.ApID)
	if err != nil {
		return err
	}

	// Overwrites the whole struct value; any previously eager-loaded
	// relationships held in o.R are replaced along with it.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *ActivityPropertySlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := ActivityPropertySlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activityPropertyPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"activity_properties\".* FROM \"activity_properties\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activityPropertyPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in ActivityPropertySlice")
	}

	// NOTE(review): results come back in database order, which may differ from
	// the caller's original slice order; rows deleted concurrently simply drop out.
	*o = slice

	return nil
}

// ActivityPropertyExists checks if the ActivityProperty row exists.
+func ActivityPropertyExists(ctx context.Context, exec boil.ContextExecutor, apID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"activity_properties\" where \"ap_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, apID) + } + row := exec.QueryRowContext(ctx, sql, apID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if activity_properties exists") + } + + return exists, nil +} diff --git a/models/activity_smid.go b/models/activity_smid.go new file mode 100644 index 0000000..138a35a --- /dev/null +++ b/models/activity_smid.go @@ -0,0 +1,1307 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// ActivitySmid is an object representing the database table. +type ActivitySmid struct { + Smid int64 `boil:"smid" json:"smid" toml:"smid" yaml:"smid"` + + R *activitySmidR `boil:"-" json:"-" toml:"-" yaml:"-"` + L activitySmidL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ActivitySmidColumns = struct { + Smid string +}{ + Smid: "smid", +} + +var ActivitySmidTableColumns = struct { + Smid string +}{ + Smid: "activity_smid.smid", +} + +// Generated where + +var ActivitySmidWhere = struct { + Smid whereHelperint64 +}{ + Smid: whereHelperint64{field: "\"activity_smid\".\"smid\""}, +} + +// ActivitySmidRels is where relationship names are stored. 
+var ActivitySmidRels = struct { + SmidActivitySupps string + SmidActivitySuppMaps string +}{ + SmidActivitySupps: "SmidActivitySupps", + SmidActivitySuppMaps: "SmidActivitySuppMaps", +} + +// activitySmidR is where relationships are stored. +type activitySmidR struct { + SmidActivitySupps ActivitySuppSlice `boil:"SmidActivitySupps" json:"SmidActivitySupps" toml:"SmidActivitySupps" yaml:"SmidActivitySupps"` + SmidActivitySuppMaps ActivitySuppMapSlice `boil:"SmidActivitySuppMaps" json:"SmidActivitySuppMaps" toml:"SmidActivitySuppMaps" yaml:"SmidActivitySuppMaps"` +} + +// NewStruct creates a new relationship struct +func (*activitySmidR) NewStruct() *activitySmidR { + return &activitySmidR{} +} + +func (r *activitySmidR) GetSmidActivitySupps() ActivitySuppSlice { + if r == nil { + return nil + } + return r.SmidActivitySupps +} + +func (r *activitySmidR) GetSmidActivitySuppMaps() ActivitySuppMapSlice { + if r == nil { + return nil + } + return r.SmidActivitySuppMaps +} + +// activitySmidL is where Load methods for each relationship are stored. +type activitySmidL struct{} + +var ( + activitySmidAllColumns = []string{"smid"} + activitySmidColumnsWithoutDefault = []string{"smid"} + activitySmidColumnsWithDefault = []string{} + activitySmidPrimaryKeyColumns = []string{"smid"} + activitySmidGeneratedColumns = []string{} +) + +type ( + // ActivitySmidSlice is an alias for a slice of pointers to ActivitySmid. + // This should almost always be used instead of []ActivitySmid. 
+ ActivitySmidSlice []*ActivitySmid + // ActivitySmidHook is the signature for custom ActivitySmid hook methods + ActivitySmidHook func(context.Context, boil.ContextExecutor, *ActivitySmid) error + + activitySmidQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + activitySmidType = reflect.TypeOf(&ActivitySmid{}) + activitySmidMapping = queries.MakeStructMapping(activitySmidType) + activitySmidPrimaryKeyMapping, _ = queries.BindMapping(activitySmidType, activitySmidMapping, activitySmidPrimaryKeyColumns) + activitySmidInsertCacheMut sync.RWMutex + activitySmidInsertCache = make(map[string]insertCache) + activitySmidUpdateCacheMut sync.RWMutex + activitySmidUpdateCache = make(map[string]updateCache) + activitySmidUpsertCacheMut sync.RWMutex + activitySmidUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var activitySmidAfterSelectHooks []ActivitySmidHook + +var activitySmidBeforeInsertHooks []ActivitySmidHook +var activitySmidAfterInsertHooks []ActivitySmidHook + +var activitySmidBeforeUpdateHooks []ActivitySmidHook +var activitySmidAfterUpdateHooks []ActivitySmidHook + +var activitySmidBeforeDeleteHooks []ActivitySmidHook +var activitySmidAfterDeleteHooks []ActivitySmidHook + +var activitySmidBeforeUpsertHooks []ActivitySmidHook +var activitySmidAfterUpsertHooks []ActivitySmidHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *ActivitySmid) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySmidAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. 
+func (o *ActivitySmid) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySmidBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *ActivitySmid) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySmidAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *ActivitySmid) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySmidBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ActivitySmid) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySmidAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *ActivitySmid) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySmidBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. 
+func (o *ActivitySmid) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySmidAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ActivitySmid) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySmidBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *ActivitySmid) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySmidAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddActivitySmidHook registers your hook function for all future operations. 
func AddActivitySmidHook(hookPoint boil.HookPoint, activitySmidHook ActivitySmidHook) {
	// NOTE(review): registration appends to package-level slices without any
	// synchronization — register hooks at startup only; TODO confirm no
	// concurrent registration happens elsewhere.
	switch hookPoint {
	case boil.AfterSelectHook:
		activitySmidAfterSelectHooks = append(activitySmidAfterSelectHooks, activitySmidHook)
	case boil.BeforeInsertHook:
		activitySmidBeforeInsertHooks = append(activitySmidBeforeInsertHooks, activitySmidHook)
	case boil.AfterInsertHook:
		activitySmidAfterInsertHooks = append(activitySmidAfterInsertHooks, activitySmidHook)
	case boil.BeforeUpdateHook:
		activitySmidBeforeUpdateHooks = append(activitySmidBeforeUpdateHooks, activitySmidHook)
	case boil.AfterUpdateHook:
		activitySmidAfterUpdateHooks = append(activitySmidAfterUpdateHooks, activitySmidHook)
	case boil.BeforeDeleteHook:
		activitySmidBeforeDeleteHooks = append(activitySmidBeforeDeleteHooks, activitySmidHook)
	case boil.AfterDeleteHook:
		activitySmidAfterDeleteHooks = append(activitySmidAfterDeleteHooks, activitySmidHook)
	case boil.BeforeUpsertHook:
		activitySmidBeforeUpsertHooks = append(activitySmidBeforeUpsertHooks, activitySmidHook)
	case boil.AfterUpsertHook:
		activitySmidAfterUpsertHooks = append(activitySmidAfterUpsertHooks, activitySmidHook)
	}
}

// One returns a single activitySmid record from the query.
func (q activitySmidQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ActivitySmid, error) {
	o := &ActivitySmid{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		// sql.ErrNoRows is surfaced unwrapped so callers can test for it directly.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for activity_smid")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all ActivitySmid records from the query.
func (q activitySmidQuery) All(ctx context.Context, exec boil.ContextExecutor) (ActivitySmidSlice, error) {
	var o []*ActivitySmid

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to ActivitySmid slice")
	}

	if len(activitySmidAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all ActivitySmid records in the query.
func (q activitySmidQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Mutates the underlying query in place (select list is cleared and
	// replaced with COUNT), so q should not be reused afterwards.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count activity_smid rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q activitySmidQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	// COUNT with LIMIT 1 lets the database stop at the first matching row.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if activity_smid exists")
	}

	return count > 0, nil
}

// SmidActivitySupps retrieves all the activity_supp's ActivitySupps with an executor via smid column.
func (o *ActivitySmid) SmidActivitySupps(mods ...qm.QueryMod) activitySuppQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	// Filter the related table to rows whose FK matches this record's PK.
	queryMods = append(queryMods,
		qm.Where("\"activity_supp\".\"smid\"=?", o.Smid),
	)

	return ActivitySupps(queryMods...)
}

// SmidActivitySuppMaps retrieves all the activity_supp_map's ActivitySuppMaps with an executor via smid column.
+func (o *ActivitySmid) SmidActivitySuppMaps(mods ...qm.QueryMod) activitySuppMapQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"activity_supp_map\".\"smid\"=?", o.Smid), + ) + + return ActivitySuppMaps(queryMods...) +} + +// LoadSmidActivitySupps allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (activitySmidL) LoadSmidActivitySupps(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivitySmid interface{}, mods queries.Applicator) error { + var slice []*ActivitySmid + var object *ActivitySmid + + if singular { + object = maybeActivitySmid.(*ActivitySmid) + } else { + slice = *maybeActivitySmid.(*[]*ActivitySmid) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &activitySmidR{} + } + args = append(args, object.Smid) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &activitySmidR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Smid) { + continue Outer + } + } + + args = append(args, obj.Smid) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`activity_supp`), + qm.WhereIn(`activity_supp.smid in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load activity_supp") + } + + var resultSlice []*ActivitySupp + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice activity_supp") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on activity_supp") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activity_supp") + } + + if 
len(activitySuppAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.SmidActivitySupps = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &activitySuppR{} + } + foreign.R.SmidActivitySmid = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.Smid, foreign.Smid) { + local.R.SmidActivitySupps = append(local.R.SmidActivitySupps, foreign) + if foreign.R == nil { + foreign.R = &activitySuppR{} + } + foreign.R.SmidActivitySmid = local + break + } + } + } + + return nil +} + +// LoadSmidActivitySuppMaps allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (activitySmidL) LoadSmidActivitySuppMaps(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivitySmid interface{}, mods queries.Applicator) error { + var slice []*ActivitySmid + var object *ActivitySmid + + if singular { + object = maybeActivitySmid.(*ActivitySmid) + } else { + slice = *maybeActivitySmid.(*[]*ActivitySmid) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &activitySmidR{} + } + args = append(args, object.Smid) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &activitySmidR{} + } + + for _, a := range args { + if a == obj.Smid { + continue Outer + } + } + + args = append(args, obj.Smid) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`activity_supp_map`), + qm.WhereIn(`activity_supp_map.smid in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load activity_supp_map") + } + + var resultSlice []*ActivitySuppMap + if err = queries.Bind(results, &resultSlice); err 
!= nil { + return errors.Wrap(err, "failed to bind eager loaded slice activity_supp_map") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on activity_supp_map") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activity_supp_map") + } + + if len(activitySuppMapAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.SmidActivitySuppMaps = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &activitySuppMapR{} + } + foreign.R.SmidActivitySmid = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.Smid == foreign.Smid { + local.R.SmidActivitySuppMaps = append(local.R.SmidActivitySuppMaps, foreign) + if foreign.R == nil { + foreign.R = &activitySuppMapR{} + } + foreign.R.SmidActivitySmid = local + break + } + } + } + + return nil +} + +// AddSmidActivitySupps adds the given related objects to the existing relationships +// of the activity_smid, optionally inserting them as new records. +// Appends related to o.R.SmidActivitySupps. +// Sets related.R.SmidActivitySmid appropriately. 
func (o *ActivitySmid) AddSmidActivitySupps(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ActivitySupp) error {
	var err error
	for _, rel := range related {
		if insert {
			// Point the related row's FK at this record, then insert it as new.
			queries.Assign(&rel.Smid, o.Smid)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Related row already exists: update only its FK column by PK.
			updateQuery := fmt.Sprintf(
				"UPDATE \"activity_supp\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"smid"}),
				strmangle.WhereClause("\"", "\"", 0, activitySuppPrimaryKeyColumns),
			)
			values := []interface{}{o.Smid, rel.AsID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			// Mirror the database change on the in-memory struct.
			queries.Assign(&rel.Smid, o.Smid)
		}
	}

	if o.R == nil {
		o.R = &activitySmidR{
			SmidActivitySupps: related,
		}
	} else {
		o.R.SmidActivitySupps = append(o.R.SmidActivitySupps, related...)
	}

	// Wire the back-reference on each related object as well.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &activitySuppR{
				SmidActivitySmid: o,
			}
		} else {
			rel.R.SmidActivitySmid = o
		}
	}
	return nil
}

// SetSmidActivitySupps removes all previously related items of the
// activity_smid replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.SmidActivitySmid's SmidActivitySupps accordingly.
// Replaces o.R.SmidActivitySupps with related.
// Sets related.R.SmidActivitySmid's SmidActivitySupps accordingly.
func (o *ActivitySmid) SetSmidActivitySupps(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ActivitySupp) error {
	// Detach every currently related row by nulling its FK (implies the
	// activity_supp.smid column is nullable).
	query := "update \"activity_supp\" set \"smid\" = null where \"smid\" = ?"
	values := []interface{}{o.Smid}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	// Clear the in-memory relationship caches on both sides.
	if o.R != nil {
		for _, rel := range o.R.SmidActivitySupps {
			queries.SetScanner(&rel.Smid, nil)
			if rel.R == nil {
				continue
			}

			rel.R.SmidActivitySmid = nil
		}
		o.R.SmidActivitySupps = nil
	}

	// Delegate attachment of the replacement set to Add.
	return o.AddSmidActivitySupps(ctx, exec, insert, related...)
}

// RemoveSmidActivitySupps relationships from objects passed in.
// Removes related items from R.SmidActivitySupps (uses pointer comparison, removal does not keep order)
// Sets related.R.SmidActivitySmid.
func (o *ActivitySmid) RemoveSmidActivitySupps(ctx context.Context, exec boil.ContextExecutor, related ...*ActivitySupp) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		// Null the FK in memory, then persist just that column.
		queries.SetScanner(&rel.Smid, nil)
		if rel.R != nil {
			rel.R.SmidActivitySmid = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("smid")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	for _, rel := range related {
		for i, ri := range o.R.SmidActivitySupps {
			if rel != ri {
				continue
			}

			// Swap-with-last removal: O(1) but does not preserve slice order.
			ln := len(o.R.SmidActivitySupps)
			if ln > 1 && i < ln-1 {
				o.R.SmidActivitySupps[i] = o.R.SmidActivitySupps[ln-1]
			}
			o.R.SmidActivitySupps = o.R.SmidActivitySupps[:ln-1]
			break
		}
	}

	return nil
}

// AddSmidActivitySuppMaps adds the given related objects to the existing relationships
// of the activity_smid, optionally inserting them as new records.
// Appends related to o.R.SmidActivitySuppMaps.
// Sets related.R.SmidActivitySmid appropriately.
+func (o *ActivitySmid) AddSmidActivitySuppMaps(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ActivitySuppMap) error { + var err error + for _, rel := range related { + if insert { + rel.Smid = o.Smid + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"activity_supp_map\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"smid"}), + strmangle.WhereClause("\"", "\"", 0, activitySuppMapPrimaryKeyColumns), + ) + values := []interface{}{o.Smid, rel.ActsmID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.Smid = o.Smid + } + } + + if o.R == nil { + o.R = &activitySmidR{ + SmidActivitySuppMaps: related, + } + } else { + o.R.SmidActivitySuppMaps = append(o.R.SmidActivitySuppMaps, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &activitySuppMapR{ + SmidActivitySmid: o, + } + } else { + rel.R.SmidActivitySmid = o + } + } + return nil +} + +// ActivitySmids retrieves all the records using an executor. +func ActivitySmids(mods ...qm.QueryMod) activitySmidQuery { + mods = append(mods, qm.From("\"activity_smid\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"activity_smid\".*"}) + } + + return activitySmidQuery{q} +} + +// FindActivitySmid retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindActivitySmid(ctx context.Context, exec boil.ContextExecutor, smid int64, selectCols ...string) (*ActivitySmid, error) { + activitySmidObj := &ActivitySmid{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"activity_smid\" where \"smid\"=?", sel, + ) + + q := queries.Raw(query, smid) + + err := q.Bind(ctx, exec, activitySmidObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from activity_smid") + } + + if err = activitySmidObj.doAfterSelectHooks(ctx, exec); err != nil { + return activitySmidObj, err + } + + return activitySmidObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *ActivitySmid) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no activity_smid provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(activitySmidColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + activitySmidInsertCacheMut.RLock() + cache, cached := activitySmidInsertCache[key] + activitySmidInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + activitySmidAllColumns, + activitySmidColumnsWithDefault, + activitySmidColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(activitySmidType, activitySmidMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(activitySmidType, activitySmidMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"activity_smid\" 
(\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"activity_smid\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into activity_smid") + } + + if !cached { + activitySmidInsertCacheMut.Lock() + activitySmidInsertCache[key] = cache + activitySmidInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ActivitySmid. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *ActivitySmid) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + activitySmidUpdateCacheMut.RLock() + cache, cached := activitySmidUpdateCache[key] + activitySmidUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + activitySmidAllColumns, + activitySmidPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update activity_smid, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"activity_smid\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, activitySmidPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(activitySmidType, activitySmidMapping, append(wl, activitySmidPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update activity_smid row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for activity_smid") + } + + if !cached { + activitySmidUpdateCacheMut.Lock() + activitySmidUpdateCache[key] = cache + activitySmidUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q activitySmidQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for activity_smid") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for activity_smid") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ActivitySmidSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activitySmidPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"activity_smid\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activitySmidPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in activitySmid slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all activitySmid") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *ActivitySmid) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no activity_smid provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(activitySmidColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + activitySmidUpsertCacheMut.RLock() + cache, cached := activitySmidUpsertCache[key] + activitySmidUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + activitySmidAllColumns, + activitySmidColumnsWithDefault, + activitySmidColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + activitySmidAllColumns, + activitySmidPrimaryKeyColumns, + 
) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert activity_smid, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(activitySmidPrimaryKeyColumns)) + copy(conflict, activitySmidPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"activity_smid\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(activitySmidType, activitySmidMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(activitySmidType, activitySmidMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert activity_smid") + } + + if !cached { + activitySmidUpsertCacheMut.Lock() + activitySmidUpsertCache[key] = cache + activitySmidUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ActivitySmid record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *ActivitySmid) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ActivitySmid provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), activitySmidPrimaryKeyMapping) + sql := "DELETE FROM \"activity_smid\" WHERE \"smid\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from activity_smid") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for activity_smid") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q activitySmidQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no activitySmidQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from activity_smid") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for activity_smid") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o ActivitySmidSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(activitySmidBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activitySmidPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"activity_smid\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activitySmidPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from activitySmid slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for activity_smid") + } + + if len(activitySmidAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ActivitySmid) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindActivitySmid(ctx, exec, o.Smid) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *ActivitySmidSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ActivitySmidSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activitySmidPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"activity_smid\".* FROM \"activity_smid\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activitySmidPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ActivitySmidSlice") + } + + *o = slice + + return nil +} + +// ActivitySmidExists checks if the ActivitySmid row exists. +func ActivitySmidExists(ctx context.Context, exec boil.ContextExecutor, smid int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"activity_smid\" where \"smid\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, smid) + } + row := exec.QueryRowContext(ctx, sql, smid) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if activity_smid exists") + } + + return exists, nil +} diff --git a/models/activity_stds_lookup.go b/models/activity_stds_lookup.go new file mode 100644 index 0000000..9f042d0 --- /dev/null +++ b/models/activity_stds_lookup.go @@ -0,0 +1,919 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/sqlboiler/v4/types" + "github.com/volatiletech/strmangle" +) + +// ActivitySTDSLookup is an object representing the database table. +type ActivitySTDSLookup struct { + STDActID int64 `boil:"std_act_id" json:"std_act_id" toml:"std_act_id" yaml:"std_act_id"` + StandardType string `boil:"standard_type" json:"standard_type" toml:"standard_type" yaml:"standard_type"` + Definition null.String `boil:"definition" json:"definition,omitempty" toml:"definition" yaml:"definition,omitempty"` + StandardUnits string `boil:"standard_units" json:"standard_units" toml:"standard_units" yaml:"standard_units"` + NormalRangeMin types.NullDecimal `boil:"normal_range_min" json:"normal_range_min,omitempty" toml:"normal_range_min" yaml:"normal_range_min,omitempty"` + NormalRangeMax types.NullDecimal `boil:"normal_range_max" json:"normal_range_max,omitempty" toml:"normal_range_max" yaml:"normal_range_max,omitempty"` + + R *activitySTDSLookupR `boil:"-" json:"-" toml:"-" yaml:"-"` + L activitySTDSLookupL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ActivitySTDSLookupColumns = struct { + STDActID string + StandardType string + Definition string + StandardUnits string + NormalRangeMin string + NormalRangeMax string +}{ + STDActID: "std_act_id", + StandardType: "standard_type", + Definition: "definition", + StandardUnits: "standard_units", + NormalRangeMin: "normal_range_min", + NormalRangeMax: "normal_range_max", +} + +var ActivitySTDSLookupTableColumns = struct { + STDActID string + StandardType string + Definition string + StandardUnits string + NormalRangeMin string + 
NormalRangeMax string +}{ + STDActID: "activity_stds_lookup.std_act_id", + StandardType: "activity_stds_lookup.standard_type", + Definition: "activity_stds_lookup.definition", + StandardUnits: "activity_stds_lookup.standard_units", + NormalRangeMin: "activity_stds_lookup.normal_range_min", + NormalRangeMax: "activity_stds_lookup.normal_range_max", +} + +// Generated where + +var ActivitySTDSLookupWhere = struct { + STDActID whereHelperint64 + StandardType whereHelperstring + Definition whereHelpernull_String + StandardUnits whereHelperstring + NormalRangeMin whereHelpertypes_NullDecimal + NormalRangeMax whereHelpertypes_NullDecimal +}{ + STDActID: whereHelperint64{field: "\"activity_stds_lookup\".\"std_act_id\""}, + StandardType: whereHelperstring{field: "\"activity_stds_lookup\".\"standard_type\""}, + Definition: whereHelpernull_String{field: "\"activity_stds_lookup\".\"definition\""}, + StandardUnits: whereHelperstring{field: "\"activity_stds_lookup\".\"standard_units\""}, + NormalRangeMin: whereHelpertypes_NullDecimal{field: "\"activity_stds_lookup\".\"normal_range_min\""}, + NormalRangeMax: whereHelpertypes_NullDecimal{field: "\"activity_stds_lookup\".\"normal_range_max\""}, +} + +// ActivitySTDSLookupRels is where relationship names are stored. +var ActivitySTDSLookupRels = struct { +}{} + +// activitySTDSLookupR is where relationships are stored. +type activitySTDSLookupR struct { +} + +// NewStruct creates a new relationship struct +func (*activitySTDSLookupR) NewStruct() *activitySTDSLookupR { + return &activitySTDSLookupR{} +} + +// activitySTDSLookupL is where Load methods for each relationship are stored. 
+type activitySTDSLookupL struct{} + +var ( + activitySTDSLookupAllColumns = []string{"std_act_id", "standard_type", "definition", "standard_units", "normal_range_min", "normal_range_max"} + activitySTDSLookupColumnsWithoutDefault = []string{"std_act_id", "standard_type", "standard_units"} + activitySTDSLookupColumnsWithDefault = []string{"definition", "normal_range_min", "normal_range_max"} + activitySTDSLookupPrimaryKeyColumns = []string{"std_act_id"} + activitySTDSLookupGeneratedColumns = []string{} +) + +type ( + // ActivitySTDSLookupSlice is an alias for a slice of pointers to ActivitySTDSLookup. + // This should almost always be used instead of []ActivitySTDSLookup. + ActivitySTDSLookupSlice []*ActivitySTDSLookup + // ActivitySTDSLookupHook is the signature for custom ActivitySTDSLookup hook methods + ActivitySTDSLookupHook func(context.Context, boil.ContextExecutor, *ActivitySTDSLookup) error + + activitySTDSLookupQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + activitySTDSLookupType = reflect.TypeOf(&ActivitySTDSLookup{}) + activitySTDSLookupMapping = queries.MakeStructMapping(activitySTDSLookupType) + activitySTDSLookupPrimaryKeyMapping, _ = queries.BindMapping(activitySTDSLookupType, activitySTDSLookupMapping, activitySTDSLookupPrimaryKeyColumns) + activitySTDSLookupInsertCacheMut sync.RWMutex + activitySTDSLookupInsertCache = make(map[string]insertCache) + activitySTDSLookupUpdateCacheMut sync.RWMutex + activitySTDSLookupUpdateCache = make(map[string]updateCache) + activitySTDSLookupUpsertCacheMut sync.RWMutex + activitySTDSLookupUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var activitySTDSLookupAfterSelectHooks []ActivitySTDSLookupHook + +var activitySTDSLookupBeforeInsertHooks []ActivitySTDSLookupHook +var activitySTDSLookupAfterInsertHooks []ActivitySTDSLookupHook + +var activitySTDSLookupBeforeUpdateHooks []ActivitySTDSLookupHook +var activitySTDSLookupAfterUpdateHooks []ActivitySTDSLookupHook + +var activitySTDSLookupBeforeDeleteHooks []ActivitySTDSLookupHook +var activitySTDSLookupAfterDeleteHooks []ActivitySTDSLookupHook + +var activitySTDSLookupBeforeUpsertHooks []ActivitySTDSLookupHook +var activitySTDSLookupAfterUpsertHooks []ActivitySTDSLookupHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *ActivitySTDSLookup) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySTDSLookupAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *ActivitySTDSLookup) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySTDSLookupBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *ActivitySTDSLookup) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySTDSLookupAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *ActivitySTDSLookup) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySTDSLookupBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ActivitySTDSLookup) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySTDSLookupAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *ActivitySTDSLookup) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySTDSLookupBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *ActivitySTDSLookup) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySTDSLookupAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ActivitySTDSLookup) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySTDSLookupBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *ActivitySTDSLookup) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySTDSLookupAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddActivitySTDSLookupHook registers your hook function for all future operations. +func AddActivitySTDSLookupHook(hookPoint boil.HookPoint, activitySTDSLookupHook ActivitySTDSLookupHook) { + switch hookPoint { + case boil.AfterSelectHook: + activitySTDSLookupAfterSelectHooks = append(activitySTDSLookupAfterSelectHooks, activitySTDSLookupHook) + case boil.BeforeInsertHook: + activitySTDSLookupBeforeInsertHooks = append(activitySTDSLookupBeforeInsertHooks, activitySTDSLookupHook) + case boil.AfterInsertHook: + activitySTDSLookupAfterInsertHooks = append(activitySTDSLookupAfterInsertHooks, activitySTDSLookupHook) + case boil.BeforeUpdateHook: + activitySTDSLookupBeforeUpdateHooks = append(activitySTDSLookupBeforeUpdateHooks, activitySTDSLookupHook) + case boil.AfterUpdateHook: + activitySTDSLookupAfterUpdateHooks = append(activitySTDSLookupAfterUpdateHooks, activitySTDSLookupHook) + case boil.BeforeDeleteHook: + activitySTDSLookupBeforeDeleteHooks = append(activitySTDSLookupBeforeDeleteHooks, activitySTDSLookupHook) + case boil.AfterDeleteHook: + activitySTDSLookupAfterDeleteHooks = append(activitySTDSLookupAfterDeleteHooks, activitySTDSLookupHook) + case boil.BeforeUpsertHook: + activitySTDSLookupBeforeUpsertHooks = append(activitySTDSLookupBeforeUpsertHooks, activitySTDSLookupHook) + case boil.AfterUpsertHook: + activitySTDSLookupAfterUpsertHooks = append(activitySTDSLookupAfterUpsertHooks, activitySTDSLookupHook) + } +} + +// One returns a single activitySTDSLookup record from the query. 
+func (q activitySTDSLookupQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ActivitySTDSLookup, error) { + o := &ActivitySTDSLookup{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for activity_stds_lookup") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ActivitySTDSLookup records from the query. +func (q activitySTDSLookupQuery) All(ctx context.Context, exec boil.ContextExecutor) (ActivitySTDSLookupSlice, error) { + var o []*ActivitySTDSLookup + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ActivitySTDSLookup slice") + } + + if len(activitySTDSLookupAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ActivitySTDSLookup records in the query. +func (q activitySTDSLookupQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count activity_stds_lookup rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. 
+func (q activitySTDSLookupQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if activity_stds_lookup exists") + } + + return count > 0, nil +} + +// ActivitySTDSLookups retrieves all the records using an executor. +func ActivitySTDSLookups(mods ...qm.QueryMod) activitySTDSLookupQuery { + mods = append(mods, qm.From("\"activity_stds_lookup\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"activity_stds_lookup\".*"}) + } + + return activitySTDSLookupQuery{q} +} + +// FindActivitySTDSLookup retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindActivitySTDSLookup(ctx context.Context, exec boil.ContextExecutor, sTDActID int64, selectCols ...string) (*ActivitySTDSLookup, error) { + activitySTDSLookupObj := &ActivitySTDSLookup{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"activity_stds_lookup\" where \"std_act_id\"=?", sel, + ) + + q := queries.Raw(query, sTDActID) + + err := q.Bind(ctx, exec, activitySTDSLookupObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from activity_stds_lookup") + } + + if err = activitySTDSLookupObj.doAfterSelectHooks(ctx, exec); err != nil { + return activitySTDSLookupObj, err + } + + return activitySTDSLookupObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *ActivitySTDSLookup) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no activity_stds_lookup provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(activitySTDSLookupColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + activitySTDSLookupInsertCacheMut.RLock() + cache, cached := activitySTDSLookupInsertCache[key] + activitySTDSLookupInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + activitySTDSLookupAllColumns, + activitySTDSLookupColumnsWithDefault, + activitySTDSLookupColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(activitySTDSLookupType, activitySTDSLookupMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(activitySTDSLookupType, activitySTDSLookupMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"activity_stds_lookup\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"activity_stds_lookup\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, 
cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into activity_stds_lookup") + } + + if !cached { + activitySTDSLookupInsertCacheMut.Lock() + activitySTDSLookupInsertCache[key] = cache + activitySTDSLookupInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ActivitySTDSLookup. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *ActivitySTDSLookup) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + activitySTDSLookupUpdateCacheMut.RLock() + cache, cached := activitySTDSLookupUpdateCache[key] + activitySTDSLookupUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + activitySTDSLookupAllColumns, + activitySTDSLookupPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update activity_stds_lookup, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"activity_stds_lookup\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, activitySTDSLookupPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(activitySTDSLookupType, activitySTDSLookupMapping, append(wl, activitySTDSLookupPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + 
fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update activity_stds_lookup row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for activity_stds_lookup") + } + + if !cached { + activitySTDSLookupUpdateCacheMut.Lock() + activitySTDSLookupUpdateCache[key] = cache + activitySTDSLookupUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q activitySTDSLookupQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for activity_stds_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for activity_stds_lookup") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ActivitySTDSLookupSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activitySTDSLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"activity_stds_lookup\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activitySTDSLookupPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in activitySTDSLookup slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all activitySTDSLookup") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *ActivitySTDSLookup) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no activity_stds_lookup provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(activitySTDSLookupColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := 
buf.String() + strmangle.PutBuffer(buf) + + activitySTDSLookupUpsertCacheMut.RLock() + cache, cached := activitySTDSLookupUpsertCache[key] + activitySTDSLookupUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + activitySTDSLookupAllColumns, + activitySTDSLookupColumnsWithDefault, + activitySTDSLookupColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + activitySTDSLookupAllColumns, + activitySTDSLookupPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert activity_stds_lookup, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(activitySTDSLookupPrimaryKeyColumns)) + copy(conflict, activitySTDSLookupPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"activity_stds_lookup\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(activitySTDSLookupType, activitySTDSLookupMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(activitySTDSLookupType, activitySTDSLookupMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert activity_stds_lookup") + } + + if !cached { + activitySTDSLookupUpsertCacheMut.Lock() + activitySTDSLookupUpsertCache[key] = cache + activitySTDSLookupUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ActivitySTDSLookup record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *ActivitySTDSLookup) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ActivitySTDSLookup provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), activitySTDSLookupPrimaryKeyMapping) + sql := "DELETE FROM \"activity_stds_lookup\" WHERE \"std_act_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from activity_stds_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for activity_stds_lookup") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q activitySTDSLookupQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no activitySTDSLookupQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from activity_stds_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for activity_stds_lookup") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o ActivitySTDSLookupSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(activitySTDSLookupBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activitySTDSLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"activity_stds_lookup\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activitySTDSLookupPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from activitySTDSLookup slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for activity_stds_lookup") + } + + if len(activitySTDSLookupAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ActivitySTDSLookup) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindActivitySTDSLookup(ctx, exec, o.STDActID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *ActivitySTDSLookupSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ActivitySTDSLookupSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activitySTDSLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"activity_stds_lookup\".* FROM \"activity_stds_lookup\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activitySTDSLookupPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ActivitySTDSLookupSlice") + } + + *o = slice + + return nil +} + +// ActivitySTDSLookupExists checks if the ActivitySTDSLookup row exists. 
+func ActivitySTDSLookupExists(ctx context.Context, exec boil.ContextExecutor, sTDActID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"activity_stds_lookup\" where \"std_act_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, sTDActID) + } + row := exec.QueryRowContext(ctx, sql, sTDActID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if activity_stds_lookup exists") + } + + return exists, nil +} diff --git a/models/activity_supp.go b/models/activity_supp.go new file mode 100644 index 0000000..7efe513 --- /dev/null +++ b/models/activity_supp.go @@ -0,0 +1,1185 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/sqlboiler/v4/types" + "github.com/volatiletech/strmangle" +) + +// ActivitySupp is an object representing the database table. 
+type ActivitySupp struct { + AsID int64 `boil:"as_id" json:"as_id" toml:"as_id" yaml:"as_id"` + Rgid int64 `boil:"rgid" json:"rgid" toml:"rgid" yaml:"rgid"` + Smid null.Int64 `boil:"smid" json:"smid,omitempty" toml:"smid" yaml:"smid,omitempty"` + Type string `boil:"type" json:"type" toml:"type" yaml:"type"` + Relation null.String `boil:"relation" json:"relation,omitempty" toml:"relation" yaml:"relation,omitempty"` + Value types.NullDecimal `boil:"value" json:"value,omitempty" toml:"value" yaml:"value,omitempty"` + Units null.String `boil:"units" json:"units,omitempty" toml:"units" yaml:"units,omitempty"` + TextValue null.String `boil:"text_value" json:"text_value,omitempty" toml:"text_value" yaml:"text_value,omitempty"` + StandardType null.String `boil:"standard_type" json:"standard_type,omitempty" toml:"standard_type" yaml:"standard_type,omitempty"` + StandardRelation null.String `boil:"standard_relation" json:"standard_relation,omitempty" toml:"standard_relation" yaml:"standard_relation,omitempty"` + StandardValue types.NullDecimal `boil:"standard_value" json:"standard_value,omitempty" toml:"standard_value" yaml:"standard_value,omitempty"` + StandardUnits null.String `boil:"standard_units" json:"standard_units,omitempty" toml:"standard_units" yaml:"standard_units,omitempty"` + StandardTextValue null.String `boil:"standard_text_value" json:"standard_text_value,omitempty" toml:"standard_text_value" yaml:"standard_text_value,omitempty"` + Comments null.String `boil:"comments" json:"comments,omitempty" toml:"comments" yaml:"comments,omitempty"` + + R *activitySuppR `boil:"-" json:"-" toml:"-" yaml:"-"` + L activitySuppL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ActivitySuppColumns = struct { + AsID string + Rgid string + Smid string + Type string + Relation string + Value string + Units string + TextValue string + StandardType string + StandardRelation string + StandardValue string + StandardUnits string + StandardTextValue string + Comments string +}{ + AsID: 
"as_id", + Rgid: "rgid", + Smid: "smid", + Type: "type", + Relation: "relation", + Value: "value", + Units: "units", + TextValue: "text_value", + StandardType: "standard_type", + StandardRelation: "standard_relation", + StandardValue: "standard_value", + StandardUnits: "standard_units", + StandardTextValue: "standard_text_value", + Comments: "comments", +} + +var ActivitySuppTableColumns = struct { + AsID string + Rgid string + Smid string + Type string + Relation string + Value string + Units string + TextValue string + StandardType string + StandardRelation string + StandardValue string + StandardUnits string + StandardTextValue string + Comments string +}{ + AsID: "activity_supp.as_id", + Rgid: "activity_supp.rgid", + Smid: "activity_supp.smid", + Type: "activity_supp.type", + Relation: "activity_supp.relation", + Value: "activity_supp.value", + Units: "activity_supp.units", + TextValue: "activity_supp.text_value", + StandardType: "activity_supp.standard_type", + StandardRelation: "activity_supp.standard_relation", + StandardValue: "activity_supp.standard_value", + StandardUnits: "activity_supp.standard_units", + StandardTextValue: "activity_supp.standard_text_value", + Comments: "activity_supp.comments", +} + +// Generated where + +var ActivitySuppWhere = struct { + AsID whereHelperint64 + Rgid whereHelperint64 + Smid whereHelpernull_Int64 + Type whereHelperstring + Relation whereHelpernull_String + Value whereHelpertypes_NullDecimal + Units whereHelpernull_String + TextValue whereHelpernull_String + StandardType whereHelpernull_String + StandardRelation whereHelpernull_String + StandardValue whereHelpertypes_NullDecimal + StandardUnits whereHelpernull_String + StandardTextValue whereHelpernull_String + Comments whereHelpernull_String +}{ + AsID: whereHelperint64{field: "\"activity_supp\".\"as_id\""}, + Rgid: whereHelperint64{field: "\"activity_supp\".\"rgid\""}, + Smid: whereHelpernull_Int64{field: "\"activity_supp\".\"smid\""}, + Type: 
whereHelperstring{field: "\"activity_supp\".\"type\""}, + Relation: whereHelpernull_String{field: "\"activity_supp\".\"relation\""}, + Value: whereHelpertypes_NullDecimal{field: "\"activity_supp\".\"value\""}, + Units: whereHelpernull_String{field: "\"activity_supp\".\"units\""}, + TextValue: whereHelpernull_String{field: "\"activity_supp\".\"text_value\""}, + StandardType: whereHelpernull_String{field: "\"activity_supp\".\"standard_type\""}, + StandardRelation: whereHelpernull_String{field: "\"activity_supp\".\"standard_relation\""}, + StandardValue: whereHelpertypes_NullDecimal{field: "\"activity_supp\".\"standard_value\""}, + StandardUnits: whereHelpernull_String{field: "\"activity_supp\".\"standard_units\""}, + StandardTextValue: whereHelpernull_String{field: "\"activity_supp\".\"standard_text_value\""}, + Comments: whereHelpernull_String{field: "\"activity_supp\".\"comments\""}, +} + +// ActivitySuppRels is where relationship names are stored. +var ActivitySuppRels = struct { + SmidActivitySmid string +}{ + SmidActivitySmid: "SmidActivitySmid", +} + +// activitySuppR is where relationships are stored. +type activitySuppR struct { + SmidActivitySmid *ActivitySmid `boil:"SmidActivitySmid" json:"SmidActivitySmid" toml:"SmidActivitySmid" yaml:"SmidActivitySmid"` +} + +// NewStruct creates a new relationship struct +func (*activitySuppR) NewStruct() *activitySuppR { + return &activitySuppR{} +} + +func (r *activitySuppR) GetSmidActivitySmid() *ActivitySmid { + if r == nil { + return nil + } + return r.SmidActivitySmid +} + +// activitySuppL is where Load methods for each relationship are stored. 
+type activitySuppL struct{} + +var ( + activitySuppAllColumns = []string{"as_id", "rgid", "smid", "type", "relation", "value", "units", "text_value", "standard_type", "standard_relation", "standard_value", "standard_units", "standard_text_value", "comments"} + activitySuppColumnsWithoutDefault = []string{"as_id", "rgid", "type"} + activitySuppColumnsWithDefault = []string{"smid", "relation", "value", "units", "text_value", "standard_type", "standard_relation", "standard_value", "standard_units", "standard_text_value", "comments"} + activitySuppPrimaryKeyColumns = []string{"as_id"} + activitySuppGeneratedColumns = []string{} +) + +type ( + // ActivitySuppSlice is an alias for a slice of pointers to ActivitySupp. + // This should almost always be used instead of []ActivitySupp. + ActivitySuppSlice []*ActivitySupp + // ActivitySuppHook is the signature for custom ActivitySupp hook methods + ActivitySuppHook func(context.Context, boil.ContextExecutor, *ActivitySupp) error + + activitySuppQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + activitySuppType = reflect.TypeOf(&ActivitySupp{}) + activitySuppMapping = queries.MakeStructMapping(activitySuppType) + activitySuppPrimaryKeyMapping, _ = queries.BindMapping(activitySuppType, activitySuppMapping, activitySuppPrimaryKeyColumns) + activitySuppInsertCacheMut sync.RWMutex + activitySuppInsertCache = make(map[string]insertCache) + activitySuppUpdateCacheMut sync.RWMutex + activitySuppUpdateCache = make(map[string]updateCache) + activitySuppUpsertCacheMut sync.RWMutex + activitySuppUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var activitySuppAfterSelectHooks []ActivitySuppHook + +var activitySuppBeforeInsertHooks []ActivitySuppHook +var activitySuppAfterInsertHooks []ActivitySuppHook + +var activitySuppBeforeUpdateHooks []ActivitySuppHook +var activitySuppAfterUpdateHooks []ActivitySuppHook + +var activitySuppBeforeDeleteHooks []ActivitySuppHook +var activitySuppAfterDeleteHooks []ActivitySuppHook + +var activitySuppBeforeUpsertHooks []ActivitySuppHook +var activitySuppAfterUpsertHooks []ActivitySuppHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *ActivitySupp) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *ActivitySupp) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *ActivitySupp) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *ActivitySupp) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ActivitySupp) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *ActivitySupp) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *ActivitySupp) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ActivitySupp) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *ActivitySupp) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddActivitySuppHook registers your hook function for all future operations. +func AddActivitySuppHook(hookPoint boil.HookPoint, activitySuppHook ActivitySuppHook) { + switch hookPoint { + case boil.AfterSelectHook: + activitySuppAfterSelectHooks = append(activitySuppAfterSelectHooks, activitySuppHook) + case boil.BeforeInsertHook: + activitySuppBeforeInsertHooks = append(activitySuppBeforeInsertHooks, activitySuppHook) + case boil.AfterInsertHook: + activitySuppAfterInsertHooks = append(activitySuppAfterInsertHooks, activitySuppHook) + case boil.BeforeUpdateHook: + activitySuppBeforeUpdateHooks = append(activitySuppBeforeUpdateHooks, activitySuppHook) + case boil.AfterUpdateHook: + activitySuppAfterUpdateHooks = append(activitySuppAfterUpdateHooks, activitySuppHook) + case boil.BeforeDeleteHook: + activitySuppBeforeDeleteHooks = append(activitySuppBeforeDeleteHooks, activitySuppHook) + case boil.AfterDeleteHook: + activitySuppAfterDeleteHooks = append(activitySuppAfterDeleteHooks, activitySuppHook) + case boil.BeforeUpsertHook: + activitySuppBeforeUpsertHooks = append(activitySuppBeforeUpsertHooks, activitySuppHook) + case boil.AfterUpsertHook: + activitySuppAfterUpsertHooks = append(activitySuppAfterUpsertHooks, activitySuppHook) + } +} + +// One returns a single activitySupp record from the query. 
+func (q activitySuppQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ActivitySupp, error) { + o := &ActivitySupp{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for activity_supp") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ActivitySupp records from the query. +func (q activitySuppQuery) All(ctx context.Context, exec boil.ContextExecutor) (ActivitySuppSlice, error) { + var o []*ActivitySupp + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ActivitySupp slice") + } + + if len(activitySuppAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ActivitySupp records in the query. +func (q activitySuppQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count activity_supp rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q activitySuppQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if activity_supp exists") + } + + return count > 0, nil +} + +// SmidActivitySmid pointed to by the foreign key. 
+func (o *ActivitySupp) SmidActivitySmid(mods ...qm.QueryMod) activitySmidQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"smid\" = ?", o.Smid), + } + + queryMods = append(queryMods, mods...) + + return ActivitySmids(queryMods...) +} + +// LoadSmidActivitySmid allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (activitySuppL) LoadSmidActivitySmid(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivitySupp interface{}, mods queries.Applicator) error { + var slice []*ActivitySupp + var object *ActivitySupp + + if singular { + object = maybeActivitySupp.(*ActivitySupp) + } else { + slice = *maybeActivitySupp.(*[]*ActivitySupp) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &activitySuppR{} + } + if !queries.IsNil(object.Smid) { + args = append(args, object.Smid) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &activitySuppR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Smid) { + continue Outer + } + } + + if !queries.IsNil(obj.Smid) { + args = append(args, obj.Smid) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`activity_smid`), + qm.WhereIn(`activity_smid.smid in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ActivitySmid") + } + + var resultSlice []*ActivitySmid + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice ActivitySmid") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for activity_smid") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activity_smid") + } + + if 
len(activitySuppAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.SmidActivitySmid = foreign + if foreign.R == nil { + foreign.R = &activitySmidR{} + } + foreign.R.SmidActivitySupps = append(foreign.R.SmidActivitySupps, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.Smid, foreign.Smid) { + local.R.SmidActivitySmid = foreign + if foreign.R == nil { + foreign.R = &activitySmidR{} + } + foreign.R.SmidActivitySupps = append(foreign.R.SmidActivitySupps, local) + break + } + } + } + + return nil +} + +// SetSmidActivitySmid of the activitySupp to the related item. +// Sets o.R.SmidActivitySmid to related. +// Adds o to related.R.SmidActivitySupps. +func (o *ActivitySupp) SetSmidActivitySmid(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ActivitySmid) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"activity_supp\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"smid"}), + strmangle.WhereClause("\"", "\"", 0, activitySuppPrimaryKeyColumns), + ) + values := []interface{}{related.Smid, o.AsID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.Smid, related.Smid) + if o.R == nil { + o.R = &activitySuppR{ + SmidActivitySmid: related, + } + } else { + o.R.SmidActivitySmid = related + } + + if related.R == nil { + related.R = &activitySmidR{ + 
SmidActivitySupps: ActivitySuppSlice{o}, + } + } else { + related.R.SmidActivitySupps = append(related.R.SmidActivitySupps, o) + } + + return nil +} + +// RemoveSmidActivitySmid relationship. +// Sets o.R.SmidActivitySmid to nil. +// Removes o from all passed in related items' relationships struct. +func (o *ActivitySupp) RemoveSmidActivitySmid(ctx context.Context, exec boil.ContextExecutor, related *ActivitySmid) error { + var err error + + queries.SetScanner(&o.Smid, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("smid")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.SmidActivitySmid = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.SmidActivitySupps { + if queries.Equal(o.Smid, ri.Smid) { + continue + } + + ln := len(related.R.SmidActivitySupps) + if ln > 1 && i < ln-1 { + related.R.SmidActivitySupps[i] = related.R.SmidActivitySupps[ln-1] + } + related.R.SmidActivitySupps = related.R.SmidActivitySupps[:ln-1] + break + } + return nil +} + +// ActivitySupps retrieves all the records using an executor. +func ActivitySupps(mods ...qm.QueryMod) activitySuppQuery { + mods = append(mods, qm.From("\"activity_supp\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"activity_supp\".*"}) + } + + return activitySuppQuery{q} +} + +// FindActivitySupp retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindActivitySupp(ctx context.Context, exec boil.ContextExecutor, asID int64, selectCols ...string) (*ActivitySupp, error) { + activitySuppObj := &ActivitySupp{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"activity_supp\" where \"as_id\"=?", sel, + ) + + q := queries.Raw(query, asID) + + err := q.Bind(ctx, exec, activitySuppObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from activity_supp") + } + + if err = activitySuppObj.doAfterSelectHooks(ctx, exec); err != nil { + return activitySuppObj, err + } + + return activitySuppObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *ActivitySupp) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no activity_supp provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(activitySuppColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + activitySuppInsertCacheMut.RLock() + cache, cached := activitySuppInsertCache[key] + activitySuppInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + activitySuppAllColumns, + activitySuppColumnsWithDefault, + activitySuppColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(activitySuppType, activitySuppMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(activitySuppType, activitySuppMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"activity_supp\" 
(\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"activity_supp\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into activity_supp") + } + + if !cached { + activitySuppInsertCacheMut.Lock() + activitySuppInsertCache[key] = cache + activitySuppInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ActivitySupp. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *ActivitySupp) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + activitySuppUpdateCacheMut.RLock() + cache, cached := activitySuppUpdateCache[key] + activitySuppUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + activitySuppAllColumns, + activitySuppPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update activity_supp, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"activity_supp\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, activitySuppPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(activitySuppType, activitySuppMapping, append(wl, activitySuppPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update activity_supp row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for activity_supp") + } + + if !cached { + activitySuppUpdateCacheMut.Lock() + activitySuppUpdateCache[key] = cache + activitySuppUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q activitySuppQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for activity_supp") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for activity_supp") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ActivitySuppSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activitySuppPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"activity_supp\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activitySuppPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in activitySupp slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all activitySupp") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *ActivitySupp) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no activity_supp provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(activitySuppColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + activitySuppUpsertCacheMut.RLock() + cache, cached := activitySuppUpsertCache[key] + activitySuppUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + activitySuppAllColumns, + activitySuppColumnsWithDefault, + activitySuppColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + activitySuppAllColumns, + activitySuppPrimaryKeyColumns, + 
) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert activity_supp, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(activitySuppPrimaryKeyColumns)) + copy(conflict, activitySuppPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"activity_supp\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(activitySuppType, activitySuppMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(activitySuppType, activitySuppMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert activity_supp") + } + + if !cached { + activitySuppUpsertCacheMut.Lock() + activitySuppUpsertCache[key] = cache + activitySuppUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ActivitySupp record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *ActivitySupp) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ActivitySupp provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), activitySuppPrimaryKeyMapping) + sql := "DELETE FROM \"activity_supp\" WHERE \"as_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from activity_supp") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for activity_supp") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q activitySuppQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no activitySuppQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from activity_supp") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for activity_supp") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o ActivitySuppSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(activitySuppBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activitySuppPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"activity_supp\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activitySuppPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from activitySupp slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for activity_supp") + } + + if len(activitySuppAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ActivitySupp) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindActivitySupp(ctx, exec, o.AsID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *ActivitySuppSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ActivitySuppSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activitySuppPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"activity_supp\".* FROM \"activity_supp\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activitySuppPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ActivitySuppSlice") + } + + *o = slice + + return nil +} + +// ActivitySuppExists checks if the ActivitySupp row exists. +func ActivitySuppExists(ctx context.Context, exec boil.ContextExecutor, asID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"activity_supp\" where \"as_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, asID) + } + row := exec.QueryRowContext(ctx, sql, asID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if activity_supp exists") + } + + return exists, nil +} diff --git a/models/activity_supp_map.go b/models/activity_supp_map.go new file mode 100644 index 0000000..4ad0506 --- /dev/null +++ b/models/activity_supp_map.go @@ -0,0 +1,1241 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// ActivitySuppMap is an object representing the database table. +type ActivitySuppMap struct { + ActsmID int64 `boil:"actsm_id" json:"actsm_id" toml:"actsm_id" yaml:"actsm_id"` + ActivityID int64 `boil:"activity_id" json:"activity_id" toml:"activity_id" yaml:"activity_id"` + Smid int64 `boil:"smid" json:"smid" toml:"smid" yaml:"smid"` + + R *activitySuppMapR `boil:"-" json:"-" toml:"-" yaml:"-"` + L activitySuppMapL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ActivitySuppMapColumns = struct { + ActsmID string + ActivityID string + Smid string +}{ + ActsmID: "actsm_id", + ActivityID: "activity_id", + Smid: "smid", +} + +var ActivitySuppMapTableColumns = struct { + ActsmID string + ActivityID string + Smid string +}{ + ActsmID: "activity_supp_map.actsm_id", + ActivityID: "activity_supp_map.activity_id", + Smid: "activity_supp_map.smid", +} + +// Generated where + +var ActivitySuppMapWhere = struct { + ActsmID whereHelperint64 + ActivityID whereHelperint64 + Smid whereHelperint64 +}{ + ActsmID: whereHelperint64{field: "\"activity_supp_map\".\"actsm_id\""}, + ActivityID: whereHelperint64{field: "\"activity_supp_map\".\"activity_id\""}, + Smid: whereHelperint64{field: "\"activity_supp_map\".\"smid\""}, +} + +// ActivitySuppMapRels is where relationship names are stored. +var ActivitySuppMapRels = struct { + Activity string + SmidActivitySmid string +}{ + Activity: "Activity", + SmidActivitySmid: "SmidActivitySmid", +} + +// activitySuppMapR is where relationships are stored. 
+type activitySuppMapR struct { + Activity *Activity `boil:"Activity" json:"Activity" toml:"Activity" yaml:"Activity"` + SmidActivitySmid *ActivitySmid `boil:"SmidActivitySmid" json:"SmidActivitySmid" toml:"SmidActivitySmid" yaml:"SmidActivitySmid"` +} + +// NewStruct creates a new relationship struct +func (*activitySuppMapR) NewStruct() *activitySuppMapR { + return &activitySuppMapR{} +} + +func (r *activitySuppMapR) GetActivity() *Activity { + if r == nil { + return nil + } + return r.Activity +} + +func (r *activitySuppMapR) GetSmidActivitySmid() *ActivitySmid { + if r == nil { + return nil + } + return r.SmidActivitySmid +} + +// activitySuppMapL is where Load methods for each relationship are stored. +type activitySuppMapL struct{} + +var ( + activitySuppMapAllColumns = []string{"actsm_id", "activity_id", "smid"} + activitySuppMapColumnsWithoutDefault = []string{"actsm_id", "activity_id", "smid"} + activitySuppMapColumnsWithDefault = []string{} + activitySuppMapPrimaryKeyColumns = []string{"actsm_id"} + activitySuppMapGeneratedColumns = []string{} +) + +type ( + // ActivitySuppMapSlice is an alias for a slice of pointers to ActivitySuppMap. + // This should almost always be used instead of []ActivitySuppMap. 
+ ActivitySuppMapSlice []*ActivitySuppMap + // ActivitySuppMapHook is the signature for custom ActivitySuppMap hook methods + ActivitySuppMapHook func(context.Context, boil.ContextExecutor, *ActivitySuppMap) error + + activitySuppMapQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + activitySuppMapType = reflect.TypeOf(&ActivitySuppMap{}) + activitySuppMapMapping = queries.MakeStructMapping(activitySuppMapType) + activitySuppMapPrimaryKeyMapping, _ = queries.BindMapping(activitySuppMapType, activitySuppMapMapping, activitySuppMapPrimaryKeyColumns) + activitySuppMapInsertCacheMut sync.RWMutex + activitySuppMapInsertCache = make(map[string]insertCache) + activitySuppMapUpdateCacheMut sync.RWMutex + activitySuppMapUpdateCache = make(map[string]updateCache) + activitySuppMapUpsertCacheMut sync.RWMutex + activitySuppMapUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var activitySuppMapAfterSelectHooks []ActivitySuppMapHook + +var activitySuppMapBeforeInsertHooks []ActivitySuppMapHook +var activitySuppMapAfterInsertHooks []ActivitySuppMapHook + +var activitySuppMapBeforeUpdateHooks []ActivitySuppMapHook +var activitySuppMapAfterUpdateHooks []ActivitySuppMapHook + +var activitySuppMapBeforeDeleteHooks []ActivitySuppMapHook +var activitySuppMapAfterDeleteHooks []ActivitySuppMapHook + +var activitySuppMapBeforeUpsertHooks []ActivitySuppMapHook +var activitySuppMapAfterUpsertHooks []ActivitySuppMapHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *ActivitySuppMap) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppMapAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *ActivitySuppMap) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppMapBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *ActivitySuppMap) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppMapAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *ActivitySuppMap) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppMapBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ActivitySuppMap) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppMapAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *ActivitySuppMap) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppMapBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *ActivitySuppMap) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppMapAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ActivitySuppMap) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppMapBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *ActivitySuppMap) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range activitySuppMapAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddActivitySuppMapHook registers your hook function for all future operations. 
+func AddActivitySuppMapHook(hookPoint boil.HookPoint, activitySuppMapHook ActivitySuppMapHook) { + switch hookPoint { + case boil.AfterSelectHook: + activitySuppMapAfterSelectHooks = append(activitySuppMapAfterSelectHooks, activitySuppMapHook) + case boil.BeforeInsertHook: + activitySuppMapBeforeInsertHooks = append(activitySuppMapBeforeInsertHooks, activitySuppMapHook) + case boil.AfterInsertHook: + activitySuppMapAfterInsertHooks = append(activitySuppMapAfterInsertHooks, activitySuppMapHook) + case boil.BeforeUpdateHook: + activitySuppMapBeforeUpdateHooks = append(activitySuppMapBeforeUpdateHooks, activitySuppMapHook) + case boil.AfterUpdateHook: + activitySuppMapAfterUpdateHooks = append(activitySuppMapAfterUpdateHooks, activitySuppMapHook) + case boil.BeforeDeleteHook: + activitySuppMapBeforeDeleteHooks = append(activitySuppMapBeforeDeleteHooks, activitySuppMapHook) + case boil.AfterDeleteHook: + activitySuppMapAfterDeleteHooks = append(activitySuppMapAfterDeleteHooks, activitySuppMapHook) + case boil.BeforeUpsertHook: + activitySuppMapBeforeUpsertHooks = append(activitySuppMapBeforeUpsertHooks, activitySuppMapHook) + case boil.AfterUpsertHook: + activitySuppMapAfterUpsertHooks = append(activitySuppMapAfterUpsertHooks, activitySuppMapHook) + } +} + +// One returns a single activitySuppMap record from the query. +func (q activitySuppMapQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ActivitySuppMap, error) { + o := &ActivitySuppMap{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for activity_supp_map") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ActivitySuppMap records from the query. 
+func (q activitySuppMapQuery) All(ctx context.Context, exec boil.ContextExecutor) (ActivitySuppMapSlice, error) { + var o []*ActivitySuppMap + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ActivitySuppMap slice") + } + + if len(activitySuppMapAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ActivitySuppMap records in the query. +func (q activitySuppMapQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count activity_supp_map rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q activitySuppMapQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if activity_supp_map exists") + } + + return count > 0, nil +} + +// Activity pointed to by the foreign key. +func (o *ActivitySuppMap) Activity(mods ...qm.QueryMod) activityQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"activity_id\" = ?", o.ActivityID), + } + + queryMods = append(queryMods, mods...) + + return Activities(queryMods...) +} + +// SmidActivitySmid pointed to by the foreign key. +func (o *ActivitySuppMap) SmidActivitySmid(mods ...qm.QueryMod) activitySmidQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"smid\" = ?", o.Smid), + } + + queryMods = append(queryMods, mods...) + + return ActivitySmids(queryMods...) 
+} + +// LoadActivity allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (activitySuppMapL) LoadActivity(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivitySuppMap interface{}, mods queries.Applicator) error { + var slice []*ActivitySuppMap + var object *ActivitySuppMap + + if singular { + object = maybeActivitySuppMap.(*ActivitySuppMap) + } else { + slice = *maybeActivitySuppMap.(*[]*ActivitySuppMap) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &activitySuppMapR{} + } + args = append(args, object.ActivityID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &activitySuppMapR{} + } + + for _, a := range args { + if a == obj.ActivityID { + continue Outer + } + } + + args = append(args, obj.ActivityID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`activities`), + qm.WhereIn(`activities.activity_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Activity") + } + + var resultSlice []*Activity + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Activity") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for activities") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activities") + } + + if len(activitySuppMapAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Activity = foreign + if foreign.R == nil { + foreign.R = 
&activityR{} + } + foreign.R.ActivitySuppMaps = append(foreign.R.ActivitySuppMaps, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ActivityID == foreign.ActivityID { + local.R.Activity = foreign + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.ActivitySuppMaps = append(foreign.R.ActivitySuppMaps, local) + break + } + } + } + + return nil +} + +// LoadSmidActivitySmid allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (activitySuppMapL) LoadSmidActivitySmid(ctx context.Context, e boil.ContextExecutor, singular bool, maybeActivitySuppMap interface{}, mods queries.Applicator) error { + var slice []*ActivitySuppMap + var object *ActivitySuppMap + + if singular { + object = maybeActivitySuppMap.(*ActivitySuppMap) + } else { + slice = *maybeActivitySuppMap.(*[]*ActivitySuppMap) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &activitySuppMapR{} + } + args = append(args, object.Smid) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &activitySuppMapR{} + } + + for _, a := range args { + if a == obj.Smid { + continue Outer + } + } + + args = append(args, obj.Smid) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`activity_smid`), + qm.WhereIn(`activity_smid.smid in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ActivitySmid") + } + + var resultSlice []*ActivitySmid + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice ActivitySmid") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for activity_smid") + } + if err = results.Err(); err != nil { + return 
errors.Wrap(err, "error occurred during iteration of eager loaded relations for activity_smid") + } + + if len(activitySuppMapAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.SmidActivitySmid = foreign + if foreign.R == nil { + foreign.R = &activitySmidR{} + } + foreign.R.SmidActivitySuppMaps = append(foreign.R.SmidActivitySuppMaps, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Smid == foreign.Smid { + local.R.SmidActivitySmid = foreign + if foreign.R == nil { + foreign.R = &activitySmidR{} + } + foreign.R.SmidActivitySuppMaps = append(foreign.R.SmidActivitySuppMaps, local) + break + } + } + } + + return nil +} + +// SetActivity of the activitySuppMap to the related item. +// Sets o.R.Activity to related. +// Adds o to related.R.ActivitySuppMaps. 
+func (o *ActivitySuppMap) SetActivity(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Activity) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"activity_supp_map\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"activity_id"}), + strmangle.WhereClause("\"", "\"", 0, activitySuppMapPrimaryKeyColumns), + ) + values := []interface{}{related.ActivityID, o.ActsmID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.ActivityID = related.ActivityID + if o.R == nil { + o.R = &activitySuppMapR{ + Activity: related, + } + } else { + o.R.Activity = related + } + + if related.R == nil { + related.R = &activityR{ + ActivitySuppMaps: ActivitySuppMapSlice{o}, + } + } else { + related.R.ActivitySuppMaps = append(related.R.ActivitySuppMaps, o) + } + + return nil +} + +// SetSmidActivitySmid of the activitySuppMap to the related item. +// Sets o.R.SmidActivitySmid to related. +// Adds o to related.R.SmidActivitySuppMaps. 
+func (o *ActivitySuppMap) SetSmidActivitySmid(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ActivitySmid) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"activity_supp_map\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"smid"}), + strmangle.WhereClause("\"", "\"", 0, activitySuppMapPrimaryKeyColumns), + ) + values := []interface{}{related.Smid, o.ActsmID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.Smid = related.Smid + if o.R == nil { + o.R = &activitySuppMapR{ + SmidActivitySmid: related, + } + } else { + o.R.SmidActivitySmid = related + } + + if related.R == nil { + related.R = &activitySmidR{ + SmidActivitySuppMaps: ActivitySuppMapSlice{o}, + } + } else { + related.R.SmidActivitySuppMaps = append(related.R.SmidActivitySuppMaps, o) + } + + return nil +} + +// ActivitySuppMaps retrieves all the records using an executor. +func ActivitySuppMaps(mods ...qm.QueryMod) activitySuppMapQuery { + mods = append(mods, qm.From("\"activity_supp_map\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"activity_supp_map\".*"}) + } + + return activitySuppMapQuery{q} +} + +// FindActivitySuppMap retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindActivitySuppMap(ctx context.Context, exec boil.ContextExecutor, actsmID int64, selectCols ...string) (*ActivitySuppMap, error) { + activitySuppMapObj := &ActivitySuppMap{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"activity_supp_map\" where \"actsm_id\"=?", sel, + ) + + q := queries.Raw(query, actsmID) + + err := q.Bind(ctx, exec, activitySuppMapObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from activity_supp_map") + } + + if err = activitySuppMapObj.doAfterSelectHooks(ctx, exec); err != nil { + return activitySuppMapObj, err + } + + return activitySuppMapObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *ActivitySuppMap) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no activity_supp_map provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(activitySuppMapColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + activitySuppMapInsertCacheMut.RLock() + cache, cached := activitySuppMapInsertCache[key] + activitySuppMapInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + activitySuppMapAllColumns, + activitySuppMapColumnsWithDefault, + activitySuppMapColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(activitySuppMapType, activitySuppMapMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(activitySuppMapType, activitySuppMapMapping, returnColumns) + if err != nil { + return err + } + 
if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"activity_supp_map\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"activity_supp_map\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into activity_supp_map") + } + + if !cached { + activitySuppMapInsertCacheMut.Lock() + activitySuppMapInsertCache[key] = cache + activitySuppMapInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ActivitySuppMap. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *ActivitySuppMap) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + activitySuppMapUpdateCacheMut.RLock() + cache, cached := activitySuppMapUpdateCache[key] + activitySuppMapUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + activitySuppMapAllColumns, + activitySuppMapPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update activity_supp_map, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"activity_supp_map\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, activitySuppMapPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(activitySuppMapType, activitySuppMapMapping, append(wl, activitySuppMapPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update activity_supp_map row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for activity_supp_map") + } + + if !cached { + activitySuppMapUpdateCacheMut.Lock() + activitySuppMapUpdateCache[key] = cache + activitySuppMapUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q activitySuppMapQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for activity_supp_map") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for activity_supp_map") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ActivitySuppMapSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activitySuppMapPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"activity_supp_map\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activitySuppMapPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in activitySuppMap slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all activitySuppMap") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *ActivitySuppMap) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no activity_supp_map provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(activitySuppMapColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + activitySuppMapUpsertCacheMut.RLock() + cache, cached := activitySuppMapUpsertCache[key] + activitySuppMapUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + activitySuppMapAllColumns, + activitySuppMapColumnsWithDefault, + activitySuppMapColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + 
activitySuppMapAllColumns, + activitySuppMapPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert activity_supp_map, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(activitySuppMapPrimaryKeyColumns)) + copy(conflict, activitySuppMapPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"activity_supp_map\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(activitySuppMapType, activitySuppMapMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(activitySuppMapType, activitySuppMapMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert activity_supp_map") + } + + if !cached { + activitySuppMapUpsertCacheMut.Lock() + activitySuppMapUpsertCache[key] = cache + activitySuppMapUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ActivitySuppMap record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *ActivitySuppMap) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ActivitySuppMap provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), activitySuppMapPrimaryKeyMapping) + sql := "DELETE FROM \"activity_supp_map\" WHERE \"actsm_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from activity_supp_map") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for activity_supp_map") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q activitySuppMapQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no activitySuppMapQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from activity_supp_map") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for activity_supp_map") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o ActivitySuppMapSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(activitySuppMapBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activitySuppMapPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"activity_supp_map\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activitySuppMapPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from activitySuppMap slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for activity_supp_map") + } + + if len(activitySuppMapAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ActivitySuppMap) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindActivitySuppMap(ctx, exec, o.ActsmID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *ActivitySuppMapSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ActivitySuppMapSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), activitySuppMapPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"activity_supp_map\".* FROM \"activity_supp_map\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, activitySuppMapPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ActivitySuppMapSlice") + } + + *o = slice + + return nil +} + +// ActivitySuppMapExists checks if the ActivitySuppMap row exists. +func ActivitySuppMapExists(ctx context.Context, exec boil.ContextExecutor, actsmID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"activity_supp_map\" where \"actsm_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, actsmID) + } + row := exec.QueryRowContext(ctx, sql, actsmID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if activity_supp_map exists") + } + + return exists, nil +} diff --git a/models/assay_class_map.go b/models/assay_class_map.go new file mode 100644 index 0000000..9748e99 --- /dev/null +++ b/models/assay_class_map.go @@ -0,0 +1,1241 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// AssayClassMap is an object representing the database table. +type AssayClassMap struct { + AssCLSMapID int64 `boil:"ass_cls_map_id" json:"ass_cls_map_id" toml:"ass_cls_map_id" yaml:"ass_cls_map_id"` + AssayID int64 `boil:"assay_id" json:"assay_id" toml:"assay_id" yaml:"assay_id"` + AssayClassID int64 `boil:"assay_class_id" json:"assay_class_id" toml:"assay_class_id" yaml:"assay_class_id"` + + R *assayClassMapR `boil:"-" json:"-" toml:"-" yaml:"-"` + L assayClassMapL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var AssayClassMapColumns = struct { + AssCLSMapID string + AssayID string + AssayClassID string +}{ + AssCLSMapID: "ass_cls_map_id", + AssayID: "assay_id", + AssayClassID: "assay_class_id", +} + +var AssayClassMapTableColumns = struct { + AssCLSMapID string + AssayID string + AssayClassID string +}{ + AssCLSMapID: "assay_class_map.ass_cls_map_id", + AssayID: "assay_class_map.assay_id", + AssayClassID: "assay_class_map.assay_class_id", +} + +// Generated where + +var AssayClassMapWhere = struct { + AssCLSMapID whereHelperint64 + AssayID whereHelperint64 + AssayClassID whereHelperint64 +}{ + AssCLSMapID: whereHelperint64{field: "\"assay_class_map\".\"ass_cls_map_id\""}, + AssayID: whereHelperint64{field: "\"assay_class_map\".\"assay_id\""}, + AssayClassID: whereHelperint64{field: "\"assay_class_map\".\"assay_class_id\""}, +} + +// AssayClassMapRels is where relationship names are stored. 
+var AssayClassMapRels = struct { + AssayClass string + Assay string +}{ + AssayClass: "AssayClass", + Assay: "Assay", +} + +// assayClassMapR is where relationships are stored. +type assayClassMapR struct { + AssayClass *AssayClassification `boil:"AssayClass" json:"AssayClass" toml:"AssayClass" yaml:"AssayClass"` + Assay *Assay `boil:"Assay" json:"Assay" toml:"Assay" yaml:"Assay"` +} + +// NewStruct creates a new relationship struct +func (*assayClassMapR) NewStruct() *assayClassMapR { + return &assayClassMapR{} +} + +func (r *assayClassMapR) GetAssayClass() *AssayClassification { + if r == nil { + return nil + } + return r.AssayClass +} + +func (r *assayClassMapR) GetAssay() *Assay { + if r == nil { + return nil + } + return r.Assay +} + +// assayClassMapL is where Load methods for each relationship are stored. +type assayClassMapL struct{} + +var ( + assayClassMapAllColumns = []string{"ass_cls_map_id", "assay_id", "assay_class_id"} + assayClassMapColumnsWithoutDefault = []string{"ass_cls_map_id", "assay_id", "assay_class_id"} + assayClassMapColumnsWithDefault = []string{} + assayClassMapPrimaryKeyColumns = []string{"ass_cls_map_id"} + assayClassMapGeneratedColumns = []string{} +) + +type ( + // AssayClassMapSlice is an alias for a slice of pointers to AssayClassMap. + // This should almost always be used instead of []AssayClassMap. 
+ AssayClassMapSlice []*AssayClassMap + // AssayClassMapHook is the signature for custom AssayClassMap hook methods + AssayClassMapHook func(context.Context, boil.ContextExecutor, *AssayClassMap) error + + assayClassMapQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + assayClassMapType = reflect.TypeOf(&AssayClassMap{}) + assayClassMapMapping = queries.MakeStructMapping(assayClassMapType) + assayClassMapPrimaryKeyMapping, _ = queries.BindMapping(assayClassMapType, assayClassMapMapping, assayClassMapPrimaryKeyColumns) + assayClassMapInsertCacheMut sync.RWMutex + assayClassMapInsertCache = make(map[string]insertCache) + assayClassMapUpdateCacheMut sync.RWMutex + assayClassMapUpdateCache = make(map[string]updateCache) + assayClassMapUpsertCacheMut sync.RWMutex + assayClassMapUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var assayClassMapAfterSelectHooks []AssayClassMapHook + +var assayClassMapBeforeInsertHooks []AssayClassMapHook +var assayClassMapAfterInsertHooks []AssayClassMapHook + +var assayClassMapBeforeUpdateHooks []AssayClassMapHook +var assayClassMapAfterUpdateHooks []AssayClassMapHook + +var assayClassMapBeforeDeleteHooks []AssayClassMapHook +var assayClassMapAfterDeleteHooks []AssayClassMapHook + +var assayClassMapBeforeUpsertHooks []AssayClassMapHook +var assayClassMapAfterUpsertHooks []AssayClassMapHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *AssayClassMap) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassMapAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *AssayClassMap) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassMapBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *AssayClassMap) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassMapAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *AssayClassMap) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassMapBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *AssayClassMap) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassMapAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *AssayClassMap) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassMapBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *AssayClassMap) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassMapAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *AssayClassMap) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassMapBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *AssayClassMap) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassMapAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddAssayClassMapHook registers your hook function for all future operations. 
+func AddAssayClassMapHook(hookPoint boil.HookPoint, assayClassMapHook AssayClassMapHook) { + switch hookPoint { + case boil.AfterSelectHook: + assayClassMapAfterSelectHooks = append(assayClassMapAfterSelectHooks, assayClassMapHook) + case boil.BeforeInsertHook: + assayClassMapBeforeInsertHooks = append(assayClassMapBeforeInsertHooks, assayClassMapHook) + case boil.AfterInsertHook: + assayClassMapAfterInsertHooks = append(assayClassMapAfterInsertHooks, assayClassMapHook) + case boil.BeforeUpdateHook: + assayClassMapBeforeUpdateHooks = append(assayClassMapBeforeUpdateHooks, assayClassMapHook) + case boil.AfterUpdateHook: + assayClassMapAfterUpdateHooks = append(assayClassMapAfterUpdateHooks, assayClassMapHook) + case boil.BeforeDeleteHook: + assayClassMapBeforeDeleteHooks = append(assayClassMapBeforeDeleteHooks, assayClassMapHook) + case boil.AfterDeleteHook: + assayClassMapAfterDeleteHooks = append(assayClassMapAfterDeleteHooks, assayClassMapHook) + case boil.BeforeUpsertHook: + assayClassMapBeforeUpsertHooks = append(assayClassMapBeforeUpsertHooks, assayClassMapHook) + case boil.AfterUpsertHook: + assayClassMapAfterUpsertHooks = append(assayClassMapAfterUpsertHooks, assayClassMapHook) + } +} + +// One returns a single assayClassMap record from the query. +func (q assayClassMapQuery) One(ctx context.Context, exec boil.ContextExecutor) (*AssayClassMap, error) { + o := &AssayClassMap{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for assay_class_map") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all AssayClassMap records from the query. 
+func (q assayClassMapQuery) All(ctx context.Context, exec boil.ContextExecutor) (AssayClassMapSlice, error) { + var o []*AssayClassMap + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to AssayClassMap slice") + } + + if len(assayClassMapAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all AssayClassMap records in the query. +func (q assayClassMapQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count assay_class_map rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q assayClassMapQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if assay_class_map exists") + } + + return count > 0, nil +} + +// AssayClass pointed to by the foreign key. +func (o *AssayClassMap) AssayClass(mods ...qm.QueryMod) assayClassificationQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"assay_class_id\" = ?", o.AssayClassID), + } + + queryMods = append(queryMods, mods...) + + return AssayClassifications(queryMods...) +} + +// Assay pointed to by the foreign key. +func (o *AssayClassMap) Assay(mods ...qm.QueryMod) assayQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"assay_id\" = ?", o.AssayID), + } + + queryMods = append(queryMods, mods...) + + return Assays(queryMods...) 
+} + +// LoadAssayClass allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (assayClassMapL) LoadAssayClass(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssayClassMap interface{}, mods queries.Applicator) error { + var slice []*AssayClassMap + var object *AssayClassMap + + if singular { + object = maybeAssayClassMap.(*AssayClassMap) + } else { + slice = *maybeAssayClassMap.(*[]*AssayClassMap) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &assayClassMapR{} + } + args = append(args, object.AssayClassID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &assayClassMapR{} + } + + for _, a := range args { + if a == obj.AssayClassID { + continue Outer + } + } + + args = append(args, obj.AssayClassID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assay_classification`), + qm.WhereIn(`assay_classification.assay_class_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load AssayClassification") + } + + var resultSlice []*AssayClassification + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice AssayClassification") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for assay_classification") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assay_classification") + } + + if len(assayClassMapAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + 
object.R.AssayClass = foreign + if foreign.R == nil { + foreign.R = &assayClassificationR{} + } + foreign.R.AssayClassAssayClassMaps = append(foreign.R.AssayClassAssayClassMaps, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.AssayClassID == foreign.AssayClassID { + local.R.AssayClass = foreign + if foreign.R == nil { + foreign.R = &assayClassificationR{} + } + foreign.R.AssayClassAssayClassMaps = append(foreign.R.AssayClassAssayClassMaps, local) + break + } + } + } + + return nil +} + +// LoadAssay allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (assayClassMapL) LoadAssay(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssayClassMap interface{}, mods queries.Applicator) error { + var slice []*AssayClassMap + var object *AssayClassMap + + if singular { + object = maybeAssayClassMap.(*AssayClassMap) + } else { + slice = *maybeAssayClassMap.(*[]*AssayClassMap) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &assayClassMapR{} + } + args = append(args, object.AssayID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &assayClassMapR{} + } + + for _, a := range args { + if a == obj.AssayID { + continue Outer + } + } + + args = append(args, obj.AssayID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assays`), + qm.WhereIn(`assays.assay_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Assay") + } + + var resultSlice []*Assay + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Assay") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for assays") + 
} + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays") + } + + if len(assayClassMapAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Assay = foreign + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.AssayClassMaps = append(foreign.R.AssayClassMaps, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.AssayID == foreign.AssayID { + local.R.Assay = foreign + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.AssayClassMaps = append(foreign.R.AssayClassMaps, local) + break + } + } + } + + return nil +} + +// SetAssayClass of the assayClassMap to the related item. +// Sets o.R.AssayClass to related. +// Adds o to related.R.AssayClassAssayClassMaps. 
+func (o *AssayClassMap) SetAssayClass(ctx context.Context, exec boil.ContextExecutor, insert bool, related *AssayClassification) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"assay_class_map\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"assay_class_id"}), + strmangle.WhereClause("\"", "\"", 0, assayClassMapPrimaryKeyColumns), + ) + values := []interface{}{related.AssayClassID, o.AssCLSMapID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.AssayClassID = related.AssayClassID + if o.R == nil { + o.R = &assayClassMapR{ + AssayClass: related, + } + } else { + o.R.AssayClass = related + } + + if related.R == nil { + related.R = &assayClassificationR{ + AssayClassAssayClassMaps: AssayClassMapSlice{o}, + } + } else { + related.R.AssayClassAssayClassMaps = append(related.R.AssayClassAssayClassMaps, o) + } + + return nil +} + +// SetAssay of the assayClassMap to the related item. +// Sets o.R.Assay to related. +// Adds o to related.R.AssayClassMaps. 
+func (o *AssayClassMap) SetAssay(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Assay) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"assay_class_map\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"assay_id"}), + strmangle.WhereClause("\"", "\"", 0, assayClassMapPrimaryKeyColumns), + ) + values := []interface{}{related.AssayID, o.AssCLSMapID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.AssayID = related.AssayID + if o.R == nil { + o.R = &assayClassMapR{ + Assay: related, + } + } else { + o.R.Assay = related + } + + if related.R == nil { + related.R = &assayR{ + AssayClassMaps: AssayClassMapSlice{o}, + } + } else { + related.R.AssayClassMaps = append(related.R.AssayClassMaps, o) + } + + return nil +} + +// AssayClassMaps retrieves all the records using an executor. +func AssayClassMaps(mods ...qm.QueryMod) assayClassMapQuery { + mods = append(mods, qm.From("\"assay_class_map\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"assay_class_map\".*"}) + } + + return assayClassMapQuery{q} +} + +// FindAssayClassMap retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindAssayClassMap(ctx context.Context, exec boil.ContextExecutor, assCLSMapID int64, selectCols ...string) (*AssayClassMap, error) { + assayClassMapObj := &AssayClassMap{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"assay_class_map\" where \"ass_cls_map_id\"=?", sel, + ) + + q := queries.Raw(query, assCLSMapID) + + err := q.Bind(ctx, exec, assayClassMapObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from assay_class_map") + } + + if err = assayClassMapObj.doAfterSelectHooks(ctx, exec); err != nil { + return assayClassMapObj, err + } + + return assayClassMapObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *AssayClassMap) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no assay_class_map provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(assayClassMapColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + assayClassMapInsertCacheMut.RLock() + cache, cached := assayClassMapInsertCache[key] + assayClassMapInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + assayClassMapAllColumns, + assayClassMapColumnsWithDefault, + assayClassMapColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(assayClassMapType, assayClassMapMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(assayClassMapType, assayClassMapMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query 
= fmt.Sprintf("INSERT INTO \"assay_class_map\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"assay_class_map\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into assay_class_map") + } + + if !cached { + assayClassMapInsertCacheMut.Lock() + assayClassMapInsertCache[key] = cache + assayClassMapInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the AssayClassMap. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *AssayClassMap) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + assayClassMapUpdateCacheMut.RLock() + cache, cached := assayClassMapUpdateCache[key] + assayClassMapUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + assayClassMapAllColumns, + assayClassMapPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update assay_class_map, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"assay_class_map\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, assayClassMapPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(assayClassMapType, assayClassMapMapping, append(wl, assayClassMapPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update assay_class_map row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for assay_class_map") + } + + if !cached { + assayClassMapUpdateCacheMut.Lock() + assayClassMapUpdateCache[key] = cache + assayClassMapUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q assayClassMapQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for assay_class_map") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for assay_class_map") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o AssayClassMapSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayClassMapPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"assay_class_map\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayClassMapPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in assayClassMap slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all assayClassMap") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *AssayClassMap) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no assay_class_map provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(assayClassMapColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + assayClassMapUpsertCacheMut.RLock() + cache, cached := assayClassMapUpsertCache[key] + assayClassMapUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + assayClassMapAllColumns, + assayClassMapColumnsWithDefault, + assayClassMapColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + assayClassMapAllColumns, + 
assayClassMapPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert assay_class_map, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(assayClassMapPrimaryKeyColumns)) + copy(conflict, assayClassMapPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"assay_class_map\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(assayClassMapType, assayClassMapMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(assayClassMapType, assayClassMapMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert assay_class_map") + } + + if !cached { + assayClassMapUpsertCacheMut.Lock() + assayClassMapUpsertCache[key] = cache + assayClassMapUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single AssayClassMap record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *AssayClassMap) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no AssayClassMap provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), assayClassMapPrimaryKeyMapping) + sql := "DELETE FROM \"assay_class_map\" WHERE \"ass_cls_map_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from assay_class_map") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for assay_class_map") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q assayClassMapQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no assayClassMapQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from assay_class_map") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for assay_class_map") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o AssayClassMapSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(assayClassMapBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayClassMapPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"assay_class_map\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayClassMapPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from assayClassMap slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for assay_class_map") + } + + if len(assayClassMapAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *AssayClassMap) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindAssayClassMap(ctx, exec, o.AssCLSMapID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *AssayClassMapSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := AssayClassMapSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayClassMapPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"assay_class_map\".* FROM \"assay_class_map\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayClassMapPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in AssayClassMapSlice") + } + + *o = slice + + return nil +} + +// AssayClassMapExists checks if the AssayClassMap row exists. +func AssayClassMapExists(ctx context.Context, exec boil.ContextExecutor, assCLSMapID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"assay_class_map\" where \"ass_cls_map_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, assCLSMapID) + } + row := exec.QueryRowContext(ctx, sql, assCLSMapID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if assay_class_map exists") + } + + return exists, nil +} diff --git a/models/assay_classification.go b/models/assay_classification.go new file mode 100644 index 0000000..1a8ae95 --- /dev/null +++ b/models/assay_classification.go @@ -0,0 +1,1094 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// AssayClassification is an object representing the database table. +type AssayClassification struct { + AssayClassID int64 `boil:"assay_class_id" json:"assay_class_id" toml:"assay_class_id" yaml:"assay_class_id"` + L1 null.String `boil:"l1" json:"l1,omitempty" toml:"l1" yaml:"l1,omitempty"` + L2 null.String `boil:"l2" json:"l2,omitempty" toml:"l2" yaml:"l2,omitempty"` + L3 null.String `boil:"l3" json:"l3,omitempty" toml:"l3" yaml:"l3,omitempty"` + ClassType null.String `boil:"class_type" json:"class_type,omitempty" toml:"class_type" yaml:"class_type,omitempty"` + Source null.String `boil:"source" json:"source,omitempty" toml:"source" yaml:"source,omitempty"` + + R *assayClassificationR `boil:"-" json:"-" toml:"-" yaml:"-"` + L assayClassificationL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var AssayClassificationColumns = struct { + AssayClassID string + L1 string + L2 string + L3 string + ClassType string + Source string +}{ + AssayClassID: "assay_class_id", + L1: "l1", + L2: "l2", + L3: "l3", + ClassType: "class_type", + Source: "source", +} + +var AssayClassificationTableColumns = struct { + AssayClassID string + L1 string + L2 string + L3 string + ClassType string + Source string +}{ + AssayClassID: "assay_classification.assay_class_id", + L1: "assay_classification.l1", + L2: "assay_classification.l2", + L3: "assay_classification.l3", + ClassType: "assay_classification.class_type", + Source: "assay_classification.source", +} + +// Generated where + +var AssayClassificationWhere = struct { + AssayClassID whereHelperint64 + L1 
whereHelpernull_String + L2 whereHelpernull_String + L3 whereHelpernull_String + ClassType whereHelpernull_String + Source whereHelpernull_String +}{ + AssayClassID: whereHelperint64{field: "\"assay_classification\".\"assay_class_id\""}, + L1: whereHelpernull_String{field: "\"assay_classification\".\"l1\""}, + L2: whereHelpernull_String{field: "\"assay_classification\".\"l2\""}, + L3: whereHelpernull_String{field: "\"assay_classification\".\"l3\""}, + ClassType: whereHelpernull_String{field: "\"assay_classification\".\"class_type\""}, + Source: whereHelpernull_String{field: "\"assay_classification\".\"source\""}, +} + +// AssayClassificationRels is where relationship names are stored. +var AssayClassificationRels = struct { + AssayClassAssayClassMaps string +}{ + AssayClassAssayClassMaps: "AssayClassAssayClassMaps", +} + +// assayClassificationR is where relationships are stored. +type assayClassificationR struct { + AssayClassAssayClassMaps AssayClassMapSlice `boil:"AssayClassAssayClassMaps" json:"AssayClassAssayClassMaps" toml:"AssayClassAssayClassMaps" yaml:"AssayClassAssayClassMaps"` +} + +// NewStruct creates a new relationship struct +func (*assayClassificationR) NewStruct() *assayClassificationR { + return &assayClassificationR{} +} + +func (r *assayClassificationR) GetAssayClassAssayClassMaps() AssayClassMapSlice { + if r == nil { + return nil + } + return r.AssayClassAssayClassMaps +} + +// assayClassificationL is where Load methods for each relationship are stored. 
+type assayClassificationL struct{} + +var ( + assayClassificationAllColumns = []string{"assay_class_id", "l1", "l2", "l3", "class_type", "source"} + assayClassificationColumnsWithoutDefault = []string{"assay_class_id"} + assayClassificationColumnsWithDefault = []string{"l1", "l2", "l3", "class_type", "source"} + assayClassificationPrimaryKeyColumns = []string{"assay_class_id"} + assayClassificationGeneratedColumns = []string{} +) + +type ( + // AssayClassificationSlice is an alias for a slice of pointers to AssayClassification. + // This should almost always be used instead of []AssayClassification. + AssayClassificationSlice []*AssayClassification + // AssayClassificationHook is the signature for custom AssayClassification hook methods + AssayClassificationHook func(context.Context, boil.ContextExecutor, *AssayClassification) error + + assayClassificationQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + assayClassificationType = reflect.TypeOf(&AssayClassification{}) + assayClassificationMapping = queries.MakeStructMapping(assayClassificationType) + assayClassificationPrimaryKeyMapping, _ = queries.BindMapping(assayClassificationType, assayClassificationMapping, assayClassificationPrimaryKeyColumns) + assayClassificationInsertCacheMut sync.RWMutex + assayClassificationInsertCache = make(map[string]insertCache) + assayClassificationUpdateCacheMut sync.RWMutex + assayClassificationUpdateCache = make(map[string]updateCache) + assayClassificationUpsertCacheMut sync.RWMutex + assayClassificationUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var assayClassificationAfterSelectHooks []AssayClassificationHook + +var assayClassificationBeforeInsertHooks []AssayClassificationHook +var assayClassificationAfterInsertHooks []AssayClassificationHook + +var assayClassificationBeforeUpdateHooks []AssayClassificationHook +var assayClassificationAfterUpdateHooks []AssayClassificationHook + +var assayClassificationBeforeDeleteHooks []AssayClassificationHook +var assayClassificationAfterDeleteHooks []AssayClassificationHook + +var assayClassificationBeforeUpsertHooks []AssayClassificationHook +var assayClassificationAfterUpsertHooks []AssayClassificationHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *AssayClassification) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassificationAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *AssayClassification) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassificationBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *AssayClassification) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassificationAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *AssayClassification) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassificationBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *AssayClassification) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassificationAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *AssayClassification) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassificationBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *AssayClassification) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassificationAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *AssayClassification) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassificationBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *AssayClassification) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayClassificationAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddAssayClassificationHook registers your hook function for all future operations. +func AddAssayClassificationHook(hookPoint boil.HookPoint, assayClassificationHook AssayClassificationHook) { + switch hookPoint { + case boil.AfterSelectHook: + assayClassificationAfterSelectHooks = append(assayClassificationAfterSelectHooks, assayClassificationHook) + case boil.BeforeInsertHook: + assayClassificationBeforeInsertHooks = append(assayClassificationBeforeInsertHooks, assayClassificationHook) + case boil.AfterInsertHook: + assayClassificationAfterInsertHooks = append(assayClassificationAfterInsertHooks, assayClassificationHook) + case boil.BeforeUpdateHook: + assayClassificationBeforeUpdateHooks = append(assayClassificationBeforeUpdateHooks, assayClassificationHook) + case boil.AfterUpdateHook: + assayClassificationAfterUpdateHooks = append(assayClassificationAfterUpdateHooks, assayClassificationHook) + case boil.BeforeDeleteHook: + assayClassificationBeforeDeleteHooks = append(assayClassificationBeforeDeleteHooks, assayClassificationHook) + case boil.AfterDeleteHook: + assayClassificationAfterDeleteHooks = append(assayClassificationAfterDeleteHooks, assayClassificationHook) + case boil.BeforeUpsertHook: + assayClassificationBeforeUpsertHooks = append(assayClassificationBeforeUpsertHooks, assayClassificationHook) + case boil.AfterUpsertHook: + assayClassificationAfterUpsertHooks = append(assayClassificationAfterUpsertHooks, assayClassificationHook) + } +} + +// One returns a single assayClassification record from the query. 
+func (q assayClassificationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*AssayClassification, error) { + o := &AssayClassification{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for assay_classification") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all AssayClassification records from the query. +func (q assayClassificationQuery) All(ctx context.Context, exec boil.ContextExecutor) (AssayClassificationSlice, error) { + var o []*AssayClassification + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to AssayClassification slice") + } + + if len(assayClassificationAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all AssayClassification records in the query. +func (q assayClassificationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count assay_classification rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. 
+func (q assayClassificationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if assay_classification exists") + } + + return count > 0, nil +} + +// AssayClassAssayClassMaps retrieves all the assay_class_map's AssayClassMaps with an executor via assay_class_id column. +func (o *AssayClassification) AssayClassAssayClassMaps(mods ...qm.QueryMod) assayClassMapQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"assay_class_map\".\"assay_class_id\"=?", o.AssayClassID), + ) + + return AssayClassMaps(queryMods...) +} + +// LoadAssayClassAssayClassMaps allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (assayClassificationL) LoadAssayClassAssayClassMaps(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssayClassification interface{}, mods queries.Applicator) error { + var slice []*AssayClassification + var object *AssayClassification + + if singular { + object = maybeAssayClassification.(*AssayClassification) + } else { + slice = *maybeAssayClassification.(*[]*AssayClassification) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &assayClassificationR{} + } + args = append(args, object.AssayClassID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &assayClassificationR{} + } + + for _, a := range args { + if a == obj.AssayClassID { + continue Outer + } + } + + args = append(args, obj.AssayClassID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assay_class_map`), + qm.WhereIn(`assay_class_map.assay_class_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load assay_class_map") + } + + var resultSlice []*AssayClassMap + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice assay_class_map") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on assay_class_map") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assay_class_map") + } + + if len(assayClassMapAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.AssayClassAssayClassMaps = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &assayClassMapR{} + } + foreign.R.AssayClass = object + } + return nil + 
} + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.AssayClassID == foreign.AssayClassID { + local.R.AssayClassAssayClassMaps = append(local.R.AssayClassAssayClassMaps, foreign) + if foreign.R == nil { + foreign.R = &assayClassMapR{} + } + foreign.R.AssayClass = local + break + } + } + } + + return nil +} + +// AddAssayClassAssayClassMaps adds the given related objects to the existing relationships +// of the assay_classification, optionally inserting them as new records. +// Appends related to o.R.AssayClassAssayClassMaps. +// Sets related.R.AssayClass appropriately. +func (o *AssayClassification) AddAssayClassAssayClassMaps(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*AssayClassMap) error { + var err error + for _, rel := range related { + if insert { + rel.AssayClassID = o.AssayClassID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"assay_class_map\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"assay_class_id"}), + strmangle.WhereClause("\"", "\"", 0, assayClassMapPrimaryKeyColumns), + ) + values := []interface{}{o.AssayClassID, rel.AssCLSMapID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.AssayClassID = o.AssayClassID + } + } + + if o.R == nil { + o.R = &assayClassificationR{ + AssayClassAssayClassMaps: related, + } + } else { + o.R.AssayClassAssayClassMaps = append(o.R.AssayClassAssayClassMaps, related...) 
+ } + + for _, rel := range related { + if rel.R == nil { + rel.R = &assayClassMapR{ + AssayClass: o, + } + } else { + rel.R.AssayClass = o + } + } + return nil +} + +// AssayClassifications retrieves all the records using an executor. +func AssayClassifications(mods ...qm.QueryMod) assayClassificationQuery { + mods = append(mods, qm.From("\"assay_classification\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"assay_classification\".*"}) + } + + return assayClassificationQuery{q} +} + +// FindAssayClassification retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindAssayClassification(ctx context.Context, exec boil.ContextExecutor, assayClassID int64, selectCols ...string) (*AssayClassification, error) { + assayClassificationObj := &AssayClassification{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"assay_classification\" where \"assay_class_id\"=?", sel, + ) + + q := queries.Raw(query, assayClassID) + + err := q.Bind(ctx, exec, assayClassificationObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from assay_classification") + } + + if err = assayClassificationObj.doAfterSelectHooks(ctx, exec); err != nil { + return assayClassificationObj, err + } + + return assayClassificationObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *AssayClassification) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no assay_classification provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(assayClassificationColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + assayClassificationInsertCacheMut.RLock() + cache, cached := assayClassificationInsertCache[key] + assayClassificationInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + assayClassificationAllColumns, + assayClassificationColumnsWithDefault, + assayClassificationColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(assayClassificationType, assayClassificationMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(assayClassificationType, assayClassificationMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"assay_classification\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"assay_classification\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, 
vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into assay_classification") + } + + if !cached { + assayClassificationInsertCacheMut.Lock() + assayClassificationInsertCache[key] = cache + assayClassificationInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the AssayClassification. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *AssayClassification) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + assayClassificationUpdateCacheMut.RLock() + cache, cached := assayClassificationUpdateCache[key] + assayClassificationUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + assayClassificationAllColumns, + assayClassificationPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update assay_classification, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"assay_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, assayClassificationPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(assayClassificationType, assayClassificationMapping, append(wl, assayClassificationPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { 
+ writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update assay_classification row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for assay_classification") + } + + if !cached { + assayClassificationUpdateCacheMut.Lock() + assayClassificationUpdateCache[key] = cache + assayClassificationUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q assayClassificationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for assay_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for assay_classification") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. 
+func (o AssayClassificationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"assay_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayClassificationPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in assayClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all assayClassification") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. 
+func (o *AssayClassification) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no assay_classification provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(assayClassificationColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + assayClassificationUpsertCacheMut.RLock() + cache, cached := assayClassificationUpsertCache[key] + assayClassificationUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + assayClassificationAllColumns, + assayClassificationColumnsWithDefault, + assayClassificationColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + assayClassificationAllColumns, + assayClassificationPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert assay_classification, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(assayClassificationPrimaryKeyColumns)) + copy(conflict, assayClassificationPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, 
"\"assay_classification\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(assayClassificationType, assayClassificationMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(assayClassificationType, assayClassificationMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert assay_classification") + } + + if !cached { + assayClassificationUpsertCacheMut.Lock() + assayClassificationUpsertCache[key] = cache + assayClassificationUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single AssayClassification record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *AssayClassification) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no AssayClassification provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), assayClassificationPrimaryKeyMapping) + sql := "DELETE FROM \"assay_classification\" WHERE \"assay_class_id\"=?" 
+ + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from assay_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for assay_classification") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q assayClassificationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no assayClassificationQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from assay_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for assay_classification") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o AssayClassificationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(assayClassificationBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := "DELETE FROM \"assay_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayClassificationPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from assayClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for assay_classification") + } + + if len(assayClassificationAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *AssayClassification) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindAssayClassification(ctx, exec, o.AssayClassID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *AssayClassificationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := AssayClassificationSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"assay_classification\".* FROM \"assay_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayClassificationPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) 
+ + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in AssayClassificationSlice") + } + + *o = slice + + return nil +} + +// AssayClassificationExists checks if the AssayClassification row exists. +func AssayClassificationExists(ctx context.Context, exec boil.ContextExecutor, assayClassID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"assay_classification\" where \"assay_class_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, assayClassID) + } + row := exec.QueryRowContext(ctx, sql, assayClassID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if assay_classification exists") + } + + return exists, nil +} diff --git a/models/assay_parameters.go b/models/assay_parameters.go new file mode 100644 index 0000000..b90acbc --- /dev/null +++ b/models/assay_parameters.go @@ -0,0 +1,1141 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/sqlboiler/v4/types" + "github.com/volatiletech/strmangle" +) + +// AssayParameter is an object representing the database table. 
+type AssayParameter struct { + AssayParamID int64 `boil:"assay_param_id" json:"assay_param_id" toml:"assay_param_id" yaml:"assay_param_id"` + AssayID int64 `boil:"assay_id" json:"assay_id" toml:"assay_id" yaml:"assay_id"` + Type string `boil:"type" json:"type" toml:"type" yaml:"type"` + Relation null.String `boil:"relation" json:"relation,omitempty" toml:"relation" yaml:"relation,omitempty"` + Value types.NullDecimal `boil:"value" json:"value,omitempty" toml:"value" yaml:"value,omitempty"` + Units null.String `boil:"units" json:"units,omitempty" toml:"units" yaml:"units,omitempty"` + TextValue null.String `boil:"text_value" json:"text_value,omitempty" toml:"text_value" yaml:"text_value,omitempty"` + StandardType null.String `boil:"standard_type" json:"standard_type,omitempty" toml:"standard_type" yaml:"standard_type,omitempty"` + StandardRelation null.String `boil:"standard_relation" json:"standard_relation,omitempty" toml:"standard_relation" yaml:"standard_relation,omitempty"` + StandardValue types.NullDecimal `boil:"standard_value" json:"standard_value,omitempty" toml:"standard_value" yaml:"standard_value,omitempty"` + StandardUnits null.String `boil:"standard_units" json:"standard_units,omitempty" toml:"standard_units" yaml:"standard_units,omitempty"` + StandardTextValue null.String `boil:"standard_text_value" json:"standard_text_value,omitempty" toml:"standard_text_value" yaml:"standard_text_value,omitempty"` + Comments null.String `boil:"comments" json:"comments,omitempty" toml:"comments" yaml:"comments,omitempty"` + + R *assayParameterR `boil:"-" json:"-" toml:"-" yaml:"-"` + L assayParameterL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var AssayParameterColumns = struct { + AssayParamID string + AssayID string + Type string + Relation string + Value string + Units string + TextValue string + StandardType string + StandardRelation string + StandardValue string + StandardUnits string + StandardTextValue string + Comments string +}{ + AssayParamID: 
"assay_param_id", + AssayID: "assay_id", + Type: "type", + Relation: "relation", + Value: "value", + Units: "units", + TextValue: "text_value", + StandardType: "standard_type", + StandardRelation: "standard_relation", + StandardValue: "standard_value", + StandardUnits: "standard_units", + StandardTextValue: "standard_text_value", + Comments: "comments", +} + +var AssayParameterTableColumns = struct { + AssayParamID string + AssayID string + Type string + Relation string + Value string + Units string + TextValue string + StandardType string + StandardRelation string + StandardValue string + StandardUnits string + StandardTextValue string + Comments string +}{ + AssayParamID: "assay_parameters.assay_param_id", + AssayID: "assay_parameters.assay_id", + Type: "assay_parameters.type", + Relation: "assay_parameters.relation", + Value: "assay_parameters.value", + Units: "assay_parameters.units", + TextValue: "assay_parameters.text_value", + StandardType: "assay_parameters.standard_type", + StandardRelation: "assay_parameters.standard_relation", + StandardValue: "assay_parameters.standard_value", + StandardUnits: "assay_parameters.standard_units", + StandardTextValue: "assay_parameters.standard_text_value", + Comments: "assay_parameters.comments", +} + +// Generated where + +var AssayParameterWhere = struct { + AssayParamID whereHelperint64 + AssayID whereHelperint64 + Type whereHelperstring + Relation whereHelpernull_String + Value whereHelpertypes_NullDecimal + Units whereHelpernull_String + TextValue whereHelpernull_String + StandardType whereHelpernull_String + StandardRelation whereHelpernull_String + StandardValue whereHelpertypes_NullDecimal + StandardUnits whereHelpernull_String + StandardTextValue whereHelpernull_String + Comments whereHelpernull_String +}{ + AssayParamID: whereHelperint64{field: "\"assay_parameters\".\"assay_param_id\""}, + AssayID: whereHelperint64{field: "\"assay_parameters\".\"assay_id\""}, + Type: whereHelperstring{field: 
"\"assay_parameters\".\"type\""}, + Relation: whereHelpernull_String{field: "\"assay_parameters\".\"relation\""}, + Value: whereHelpertypes_NullDecimal{field: "\"assay_parameters\".\"value\""}, + Units: whereHelpernull_String{field: "\"assay_parameters\".\"units\""}, + TextValue: whereHelpernull_String{field: "\"assay_parameters\".\"text_value\""}, + StandardType: whereHelpernull_String{field: "\"assay_parameters\".\"standard_type\""}, + StandardRelation: whereHelpernull_String{field: "\"assay_parameters\".\"standard_relation\""}, + StandardValue: whereHelpertypes_NullDecimal{field: "\"assay_parameters\".\"standard_value\""}, + StandardUnits: whereHelpernull_String{field: "\"assay_parameters\".\"standard_units\""}, + StandardTextValue: whereHelpernull_String{field: "\"assay_parameters\".\"standard_text_value\""}, + Comments: whereHelpernull_String{field: "\"assay_parameters\".\"comments\""}, +} + +// AssayParameterRels is where relationship names are stored. +var AssayParameterRels = struct { + Assay string +}{ + Assay: "Assay", +} + +// assayParameterR is where relationships are stored. +type assayParameterR struct { + Assay *Assay `boil:"Assay" json:"Assay" toml:"Assay" yaml:"Assay"` +} + +// NewStruct creates a new relationship struct +func (*assayParameterR) NewStruct() *assayParameterR { + return &assayParameterR{} +} + +func (r *assayParameterR) GetAssay() *Assay { + if r == nil { + return nil + } + return r.Assay +} + +// assayParameterL is where Load methods for each relationship are stored. 
+type assayParameterL struct{} + +var ( + assayParameterAllColumns = []string{"assay_param_id", "assay_id", "type", "relation", "value", "units", "text_value", "standard_type", "standard_relation", "standard_value", "standard_units", "standard_text_value", "comments"} + assayParameterColumnsWithoutDefault = []string{"assay_param_id", "assay_id", "type"} + assayParameterColumnsWithDefault = []string{"relation", "value", "units", "text_value", "standard_type", "standard_relation", "standard_value", "standard_units", "standard_text_value", "comments"} + assayParameterPrimaryKeyColumns = []string{"assay_param_id"} + assayParameterGeneratedColumns = []string{} +) + +type ( + // AssayParameterSlice is an alias for a slice of pointers to AssayParameter. + // This should almost always be used instead of []AssayParameter. + AssayParameterSlice []*AssayParameter + // AssayParameterHook is the signature for custom AssayParameter hook methods + AssayParameterHook func(context.Context, boil.ContextExecutor, *AssayParameter) error + + assayParameterQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + assayParameterType = reflect.TypeOf(&AssayParameter{}) + assayParameterMapping = queries.MakeStructMapping(assayParameterType) + assayParameterPrimaryKeyMapping, _ = queries.BindMapping(assayParameterType, assayParameterMapping, assayParameterPrimaryKeyColumns) + assayParameterInsertCacheMut sync.RWMutex + assayParameterInsertCache = make(map[string]insertCache) + assayParameterUpdateCacheMut sync.RWMutex + assayParameterUpdateCache = make(map[string]updateCache) + assayParameterUpsertCacheMut sync.RWMutex + assayParameterUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var assayParameterAfterSelectHooks []AssayParameterHook + +var assayParameterBeforeInsertHooks []AssayParameterHook +var assayParameterAfterInsertHooks []AssayParameterHook + +var assayParameterBeforeUpdateHooks []AssayParameterHook +var assayParameterAfterUpdateHooks []AssayParameterHook + +var assayParameterBeforeDeleteHooks []AssayParameterHook +var assayParameterAfterDeleteHooks []AssayParameterHook + +var assayParameterBeforeUpsertHooks []AssayParameterHook +var assayParameterAfterUpsertHooks []AssayParameterHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *AssayParameter) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayParameterAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *AssayParameter) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayParameterBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *AssayParameter) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayParameterAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *AssayParameter) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayParameterBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *AssayParameter) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayParameterAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *AssayParameter) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayParameterBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *AssayParameter) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayParameterAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *AssayParameter) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayParameterBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *AssayParameter) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayParameterAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddAssayParameterHook registers your hook function for all future operations. +func AddAssayParameterHook(hookPoint boil.HookPoint, assayParameterHook AssayParameterHook) { + switch hookPoint { + case boil.AfterSelectHook: + assayParameterAfterSelectHooks = append(assayParameterAfterSelectHooks, assayParameterHook) + case boil.BeforeInsertHook: + assayParameterBeforeInsertHooks = append(assayParameterBeforeInsertHooks, assayParameterHook) + case boil.AfterInsertHook: + assayParameterAfterInsertHooks = append(assayParameterAfterInsertHooks, assayParameterHook) + case boil.BeforeUpdateHook: + assayParameterBeforeUpdateHooks = append(assayParameterBeforeUpdateHooks, assayParameterHook) + case boil.AfterUpdateHook: + assayParameterAfterUpdateHooks = append(assayParameterAfterUpdateHooks, assayParameterHook) + case boil.BeforeDeleteHook: + assayParameterBeforeDeleteHooks = append(assayParameterBeforeDeleteHooks, assayParameterHook) + case boil.AfterDeleteHook: + assayParameterAfterDeleteHooks = append(assayParameterAfterDeleteHooks, assayParameterHook) + case boil.BeforeUpsertHook: + assayParameterBeforeUpsertHooks = append(assayParameterBeforeUpsertHooks, assayParameterHook) + case boil.AfterUpsertHook: + assayParameterAfterUpsertHooks = append(assayParameterAfterUpsertHooks, assayParameterHook) + } +} + +// One returns a single assayParameter record from the query. 
+func (q assayParameterQuery) One(ctx context.Context, exec boil.ContextExecutor) (*AssayParameter, error) { + o := &AssayParameter{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for assay_parameters") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all AssayParameter records from the query. +func (q assayParameterQuery) All(ctx context.Context, exec boil.ContextExecutor) (AssayParameterSlice, error) { + var o []*AssayParameter + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to AssayParameter slice") + } + + if len(assayParameterAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all AssayParameter records in the query. +func (q assayParameterQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count assay_parameters rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q assayParameterQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if assay_parameters exists") + } + + return count > 0, nil +} + +// Assay pointed to by the foreign key. 
+func (o *AssayParameter) Assay(mods ...qm.QueryMod) assayQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"assay_id\" = ?", o.AssayID), + } + + queryMods = append(queryMods, mods...) + + return Assays(queryMods...) +} + +// LoadAssay allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (assayParameterL) LoadAssay(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssayParameter interface{}, mods queries.Applicator) error { + var slice []*AssayParameter + var object *AssayParameter + + if singular { + object = maybeAssayParameter.(*AssayParameter) + } else { + slice = *maybeAssayParameter.(*[]*AssayParameter) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &assayParameterR{} + } + args = append(args, object.AssayID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &assayParameterR{} + } + + for _, a := range args { + if a == obj.AssayID { + continue Outer + } + } + + args = append(args, obj.AssayID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assays`), + qm.WhereIn(`assays.assay_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Assay") + } + + var resultSlice []*Assay + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Assay") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for assays") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays") + } + + if len(assayParameterAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if 
len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Assay = foreign + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.AssayParameters = append(foreign.R.AssayParameters, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.AssayID == foreign.AssayID { + local.R.Assay = foreign + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.AssayParameters = append(foreign.R.AssayParameters, local) + break + } + } + } + + return nil +} + +// SetAssay of the assayParameter to the related item. +// Sets o.R.Assay to related. +// Adds o to related.R.AssayParameters. +func (o *AssayParameter) SetAssay(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Assay) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"assay_parameters\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"assay_id"}), + strmangle.WhereClause("\"", "\"", 0, assayParameterPrimaryKeyColumns), + ) + values := []interface{}{related.AssayID, o.AssayParamID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.AssayID = related.AssayID + if o.R == nil { + o.R = &assayParameterR{ + Assay: related, + } + } else { + o.R.Assay = related + } + + if related.R == nil { + related.R = &assayR{ + AssayParameters: AssayParameterSlice{o}, + } + } else { + related.R.AssayParameters = append(related.R.AssayParameters, o) + } + + return nil +} + +// AssayParameters retrieves all the records using an executor. 
+func AssayParameters(mods ...qm.QueryMod) assayParameterQuery { + mods = append(mods, qm.From("\"assay_parameters\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"assay_parameters\".*"}) + } + + return assayParameterQuery{q} +} + +// FindAssayParameter retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindAssayParameter(ctx context.Context, exec boil.ContextExecutor, assayParamID int64, selectCols ...string) (*AssayParameter, error) { + assayParameterObj := &AssayParameter{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"assay_parameters\" where \"assay_param_id\"=?", sel, + ) + + q := queries.Raw(query, assayParamID) + + err := q.Bind(ctx, exec, assayParameterObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from assay_parameters") + } + + if err = assayParameterObj.doAfterSelectHooks(ctx, exec); err != nil { + return assayParameterObj, err + } + + return assayParameterObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *AssayParameter) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no assay_parameters provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(assayParameterColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + assayParameterInsertCacheMut.RLock() + cache, cached := assayParameterInsertCache[key] + assayParameterInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + assayParameterAllColumns, + assayParameterColumnsWithDefault, + assayParameterColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(assayParameterType, assayParameterMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(assayParameterType, assayParameterMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"assay_parameters\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"assay_parameters\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) 
+ } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into assay_parameters") + } + + if !cached { + assayParameterInsertCacheMut.Lock() + assayParameterInsertCache[key] = cache + assayParameterInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the AssayParameter. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *AssayParameter) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + assayParameterUpdateCacheMut.RLock() + cache, cached := assayParameterUpdateCache[key] + assayParameterUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + assayParameterAllColumns, + assayParameterPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update assay_parameters, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"assay_parameters\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, assayParameterPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(assayParameterType, assayParameterMapping, append(wl, assayParameterPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = 
exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update assay_parameters row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for assay_parameters") + } + + if !cached { + assayParameterUpdateCacheMut.Lock() + assayParameterUpdateCache[key] = cache + assayParameterUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q assayParameterQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for assay_parameters") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for assay_parameters") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o AssayParameterSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayParameterPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"assay_parameters\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayParameterPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in assayParameter slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all assayParameter") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *AssayParameter) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no assay_parameters provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(assayParameterColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + 
strmangle.PutBuffer(buf) + + assayParameterUpsertCacheMut.RLock() + cache, cached := assayParameterUpsertCache[key] + assayParameterUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + assayParameterAllColumns, + assayParameterColumnsWithDefault, + assayParameterColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + assayParameterAllColumns, + assayParameterPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert assay_parameters, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(assayParameterPrimaryKeyColumns)) + copy(conflict, assayParameterPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"assay_parameters\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(assayParameterType, assayParameterMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(assayParameterType, assayParameterMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert assay_parameters") + } + + if !cached { + assayParameterUpsertCacheMut.Lock() + assayParameterUpsertCache[key] = cache + assayParameterUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single AssayParameter record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *AssayParameter) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no AssayParameter provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), assayParameterPrimaryKeyMapping) + sql := "DELETE FROM \"assay_parameters\" WHERE \"assay_param_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from assay_parameters") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for assay_parameters") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q assayParameterQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no assayParameterQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from assay_parameters") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for assay_parameters") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o AssayParameterSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(assayParameterBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayParameterPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"assay_parameters\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayParameterPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from assayParameter slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for assay_parameters") + } + + if len(assayParameterAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *AssayParameter) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindAssayParameter(ctx, exec, o.AssayParamID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *AssayParameterSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := AssayParameterSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayParameterPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"assay_parameters\".* FROM \"assay_parameters\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayParameterPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in AssayParameterSlice") + } + + *o = slice + + return nil +} + +// AssayParameterExists checks if the AssayParameter row exists. +func AssayParameterExists(ctx context.Context, exec boil.ContextExecutor, assayParamID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"assay_parameters\" where \"assay_param_id\"=? 
limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, assayParamID) + } + row := exec.QueryRowContext(ctx, sql, assayParamID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if assay_parameters exists") + } + + return exists, nil +} diff --git a/models/assay_type.go b/models/assay_type.go new file mode 100644 index 0000000..44ed20c --- /dev/null +++ b/models/assay_type.go @@ -0,0 +1,1140 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// AssayType is an object representing the database table. 
+type AssayType struct { + AssayType string `boil:"assay_type" json:"assay_type" toml:"assay_type" yaml:"assay_type"` + AssayDesc null.String `boil:"assay_desc" json:"assay_desc,omitempty" toml:"assay_desc" yaml:"assay_desc,omitempty"` + + R *assayTypeR `boil:"-" json:"-" toml:"-" yaml:"-"` + L assayTypeL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var AssayTypeColumns = struct { + AssayType string + AssayDesc string +}{ + AssayType: "assay_type", + AssayDesc: "assay_desc", +} + +var AssayTypeTableColumns = struct { + AssayType string + AssayDesc string +}{ + AssayType: "assay_type.assay_type", + AssayDesc: "assay_type.assay_desc", +} + +// Generated where + +var AssayTypeWhere = struct { + AssayType whereHelperstring + AssayDesc whereHelpernull_String +}{ + AssayType: whereHelperstring{field: "\"assay_type\".\"assay_type\""}, + AssayDesc: whereHelpernull_String{field: "\"assay_type\".\"assay_desc\""}, +} + +// AssayTypeRels is where relationship names are stored. +var AssayTypeRels = struct { + Assays string +}{ + Assays: "Assays", +} + +// assayTypeR is where relationships are stored. +type assayTypeR struct { + Assays AssaySlice `boil:"Assays" json:"Assays" toml:"Assays" yaml:"Assays"` +} + +// NewStruct creates a new relationship struct +func (*assayTypeR) NewStruct() *assayTypeR { + return &assayTypeR{} +} + +func (r *assayTypeR) GetAssays() AssaySlice { + if r == nil { + return nil + } + return r.Assays +} + +// assayTypeL is where Load methods for each relationship are stored. +type assayTypeL struct{} + +var ( + assayTypeAllColumns = []string{"assay_type", "assay_desc"} + assayTypeColumnsWithoutDefault = []string{"assay_type"} + assayTypeColumnsWithDefault = []string{"assay_desc"} + assayTypePrimaryKeyColumns = []string{"assay_type"} + assayTypeGeneratedColumns = []string{} +) + +type ( + // AssayTypeSlice is an alias for a slice of pointers to AssayType. + // This should almost always be used instead of []AssayType. 
+ AssayTypeSlice []*AssayType + // AssayTypeHook is the signature for custom AssayType hook methods + AssayTypeHook func(context.Context, boil.ContextExecutor, *AssayType) error + + assayTypeQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + assayTypeType = reflect.TypeOf(&AssayType{}) + assayTypeMapping = queries.MakeStructMapping(assayTypeType) + assayTypePrimaryKeyMapping, _ = queries.BindMapping(assayTypeType, assayTypeMapping, assayTypePrimaryKeyColumns) + assayTypeInsertCacheMut sync.RWMutex + assayTypeInsertCache = make(map[string]insertCache) + assayTypeUpdateCacheMut sync.RWMutex + assayTypeUpdateCache = make(map[string]updateCache) + assayTypeUpsertCacheMut sync.RWMutex + assayTypeUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var assayTypeAfterSelectHooks []AssayTypeHook + +var assayTypeBeforeInsertHooks []AssayTypeHook +var assayTypeAfterInsertHooks []AssayTypeHook + +var assayTypeBeforeUpdateHooks []AssayTypeHook +var assayTypeAfterUpdateHooks []AssayTypeHook + +var assayTypeBeforeDeleteHooks []AssayTypeHook +var assayTypeAfterDeleteHooks []AssayTypeHook + +var assayTypeBeforeUpsertHooks []AssayTypeHook +var assayTypeAfterUpsertHooks []AssayTypeHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *AssayType) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayTypeAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. 
+func (o *AssayType) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayTypeBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *AssayType) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayTypeAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *AssayType) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayTypeBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *AssayType) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayTypeAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *AssayType) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayTypeBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. 
+func (o *AssayType) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayTypeAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *AssayType) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayTypeBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *AssayType) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayTypeAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddAssayTypeHook registers your hook function for all future operations. 
+func AddAssayTypeHook(hookPoint boil.HookPoint, assayTypeHook AssayTypeHook) { + switch hookPoint { + case boil.AfterSelectHook: + assayTypeAfterSelectHooks = append(assayTypeAfterSelectHooks, assayTypeHook) + case boil.BeforeInsertHook: + assayTypeBeforeInsertHooks = append(assayTypeBeforeInsertHooks, assayTypeHook) + case boil.AfterInsertHook: + assayTypeAfterInsertHooks = append(assayTypeAfterInsertHooks, assayTypeHook) + case boil.BeforeUpdateHook: + assayTypeBeforeUpdateHooks = append(assayTypeBeforeUpdateHooks, assayTypeHook) + case boil.AfterUpdateHook: + assayTypeAfterUpdateHooks = append(assayTypeAfterUpdateHooks, assayTypeHook) + case boil.BeforeDeleteHook: + assayTypeBeforeDeleteHooks = append(assayTypeBeforeDeleteHooks, assayTypeHook) + case boil.AfterDeleteHook: + assayTypeAfterDeleteHooks = append(assayTypeAfterDeleteHooks, assayTypeHook) + case boil.BeforeUpsertHook: + assayTypeBeforeUpsertHooks = append(assayTypeBeforeUpsertHooks, assayTypeHook) + case boil.AfterUpsertHook: + assayTypeAfterUpsertHooks = append(assayTypeAfterUpsertHooks, assayTypeHook) + } +} + +// One returns a single assayType record from the query. +func (q assayTypeQuery) One(ctx context.Context, exec boil.ContextExecutor) (*AssayType, error) { + o := &AssayType{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for assay_type") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all AssayType records from the query. 
+func (q assayTypeQuery) All(ctx context.Context, exec boil.ContextExecutor) (AssayTypeSlice, error) { + var o []*AssayType + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to AssayType slice") + } + + if len(assayTypeAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all AssayType records in the query. +func (q assayTypeQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count assay_type rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q assayTypeQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if assay_type exists") + } + + return count > 0, nil +} + +// Assays retrieves all the assay's Assays with an executor. +func (o *AssayType) Assays(mods ...qm.QueryMod) assayQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"assays\".\"assay_type\"=?", o.AssayType), + ) + + return Assays(queryMods...) +} + +// LoadAssays allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
// LoadAssays eager-loads the related Assay rows for either a single
// AssayType (singular) or a slice of them, caching the results into each
// object's R.Assays. One-to-many relationship keyed on assays.assay_type.
func (assayTypeL) LoadAssays(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssayType interface{}, mods queries.Applicator) error {
	var slice []*AssayType
	var object *AssayType

	if singular {
		object = maybeAssayType.(*AssayType)
	} else {
		slice = *maybeAssayType.(*[]*AssayType)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &assayTypeR{}
		}
		args = append(args, object.AssayType)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &assayTypeR{}
			}

			// Skip duplicate key values so the IN clause stays minimal.
			for _, a := range args {
				if queries.Equal(a, obj.AssayType) {
					continue Outer
				}
			}

			args = append(args, obj.AssayType)
		}
	}

	// Nothing to look up (e.g. empty slice): avoid issuing a query at all.
	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`assays`),
		qm.WhereIn(`assays.assay_type in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load assays")
	}

	var resultSlice []*Assay
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice assays")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on assays")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays")
	}

	if len(assayAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.Assays = resultSlice
		// Set the back-reference on each loaded row.
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &assayR{}
			}
			foreign.R.AssayAssayType = object
		}
		return nil
	}

	// Distribute loaded rows to the matching parent in the slice.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.AssayType, foreign.AssayType) {
				local.R.Assays = append(local.R.Assays, foreign)
				if foreign.R == nil {
					foreign.R = &assayR{}
				}
				foreign.R.AssayAssayType = local
				break
			}
		}
	}

	return nil
}

// AddAssays adds the given related objects to the existing relationships
// of the assay_type, optionally inserting them as new records.
// Appends related to o.R.Assays.
// Sets related.R.AssayAssayType appropriately.
func (o *AssayType) AddAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error {
	var err error
	for _, rel := range related {
		if insert {
			// New row: point its FK at this assay_type before inserting.
			queries.Assign(&rel.AssayType, o.AssayType)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: update only the FK column in the database.
			updateQuery := fmt.Sprintf(
				"UPDATE \"assays\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"assay_type"}),
				strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns),
			)
			values := []interface{}{o.AssayType, rel.AssayID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.AssayType, o.AssayType)
		}
	}

	if o.R == nil {
		o.R = &assayTypeR{
			Assays: related,
		}
	} else {
		o.R.Assays = append(o.R.Assays, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &assayR{
				AssayAssayType: o,
			}
		} else {
			rel.R.AssayAssayType = o
		}
	}
	return nil
}

// SetAssays removes all previously related items of the
// assay_type replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.AssayAssayType's Assays accordingly.
// Replaces o.R.Assays with related.
// Sets related.R.AssayAssayType's Assays accordingly.
func (o *AssayType) SetAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error {
	// Null out the FK on every currently-related row, then delegate to
	// AddAssays for the new set.
	query := "update \"assays\" set \"assay_type\" = null where \"assay_type\" = ?"
	values := []interface{}{o.AssayType}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	// Also clear the in-memory relationship caches on both sides.
	if o.R != nil {
		for _, rel := range o.R.Assays {
			queries.SetScanner(&rel.AssayType, nil)
			if rel.R == nil {
				continue
			}

			rel.R.AssayAssayType = nil
		}
		o.R.Assays = nil
	}

	return o.AddAssays(ctx, exec, insert, related...)
}

// RemoveAssays relationships from objects passed in.
// Removes related items from R.Assays (uses pointer comparison, removal does not keep order)
// Sets related.R.AssayAssayType.
func (o *AssayType) RemoveAssays(ctx context.Context, exec boil.ContextExecutor, related ...*Assay) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		queries.SetScanner(&rel.AssayType, nil)
		if rel.R != nil {
			rel.R.AssayAssayType = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("assay_type")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	for _, rel := range related {
		for i, ri := range o.R.Assays {
			if rel != ri {
				continue
			}

			// Unordered removal: swap the last element into position i.
			ln := len(o.R.Assays)
			if ln > 1 && i < ln-1 {
				o.R.Assays[i] = o.R.Assays[ln-1]
			}
			o.R.Assays = o.R.Assays[:ln-1]
			break
		}
	}

	return nil
}

// AssayTypes retrieves all the records using an executor.
func AssayTypes(mods ...qm.QueryMod) assayTypeQuery {
	mods = append(mods, qm.From("\"assay_type\""))
	q := NewQuery(mods...)
	// Default to selecting every column unless a mod already set a select.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"assay_type\".*"})
	}

	return assayTypeQuery{q}
}

// FindAssayType retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindAssayType(ctx context.Context, exec boil.ContextExecutor, assayType string, selectCols ...string) (*AssayType, error) {
	assayTypeObj := &AssayType{}

	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"assay_type\" where \"assay_type\"=?", sel,
	)

	q := queries.Raw(query, assayType)

	err := q.Bind(ctx, exec, assayTypeObj)
	if err != nil {
		// Pass sql.ErrNoRows through unwrapped so callers can test for it.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from assay_type")
	}

	if err = assayTypeObj.doAfterSelectHooks(ctx, exec); err != nil {
		return assayTypeObj, err
	}

	return assayTypeObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *AssayType) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no assay_type provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(assayTypeColumnsWithDefault, o)

	// Query text and struct mappings are cached per (column set, non-zero
	// defaults) so they are only built once; guarded by an RWMutex.
	key := makeCacheKey(columns, nzDefaults)
	assayTypeInsertCacheMut.RLock()
	cache, cached := assayTypeInsertCache[key]
	assayTypeInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			assayTypeAllColumns,
			assayTypeColumnsWithDefault,
			assayTypeColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(assayTypeType, assayTypeMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(assayTypeType, assayTypeMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"assay_type\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"assay_type\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	if len(cache.retMapping) != 0 {
		// RETURNING clause present: scan DB-generated values back into o.
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into assay_type")
	}

	if !cached {
		assayTypeInsertCacheMut.Lock()
		assayTypeInsertCache[key] = cache
		assayTypeInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the AssayType.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *AssayType) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	assayTypeUpdateCacheMut.RLock()
	cache, cached := assayTypeUpdateCache[key]
	assayTypeUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			assayTypeAllColumns,
			assayTypePrimaryKeyColumns,
		)

		// created_at is never updated unless explicitly whitelisted.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update assay_type, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"assay_type\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, assayTypePrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(assayTypeType, assayTypeMapping, append(wl, assayTypePrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update assay_type row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for assay_type")
	}

	if !cached {
		assayTypeUpdateCacheMut.Lock()
		assayTypeUpdateCache[key] = cache
		assayTypeUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q assayTypeQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for assay_type")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for assay_type")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o AssayTypeSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayTypePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"assay_type\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayTypePrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in assayType slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all assayType")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *AssayType) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no assay_type provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(assayTypeColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	assayTypeUpsertCacheMut.RLock()
	cache, cached := assayTypeUpsertCache[key]
	assayTypeUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			assayTypeAllColumns,
			assayTypeColumnsWithDefault,
			assayTypeColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			assayTypeAllColumns,
			assayTypePrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert assay_type, could not build update column list")
		}

		// Default conflict target to the primary key columns.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(assayTypePrimaryKeyColumns))
			copy(conflict, assayTypePrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"assay_type\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(assayTypeType, assayTypeMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(assayTypeType, assayTypeMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert assay_type")
	}

	if !cached {
		assayTypeUpsertCacheMut.Lock()
		assayTypeUpsertCache[key] = cache
		assayTypeUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single AssayType record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *AssayType) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no AssayType provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), assayTypePrimaryKeyMapping)
	sql := "DELETE FROM \"assay_type\" WHERE \"assay_type\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from assay_type")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for assay_type")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q assayTypeQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no assayTypeQuery provided for delete all")
	}

	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from assay_type")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for assay_type")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o AssayTypeSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	if len(assayTypeBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayTypePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// One DELETE with a repeated primary-key WHERE clause for all rows.
	sql := "DELETE FROM \"assay_type\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayTypePrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from assayType slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for assay_type")
	}

	if len(assayTypeAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *AssayType) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindAssayType(ctx, exec, o.AssayType)
	if err != nil {
		return err
	}

	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *AssayTypeSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := AssayTypeSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayTypePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"assay_type\".* FROM \"assay_type\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayTypePrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in AssayTypeSlice")
	}

	*o = slice

	return nil
}

// AssayTypeExists checks if the AssayType row exists.
func AssayTypeExists(ctx context.Context, exec boil.ContextExecutor, assayType string) (bool, error) {
	var exists bool
	// NOTE(review): `?` placeholder is the non-indexed dialect style — confirm
	// against the dialect configuration used for generation.
	sql := "select exists(select 1 from \"assay_type\" where \"assay_type\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, assayType)
	}
	row := exec.QueryRowContext(ctx, sql, assayType)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if assay_type exists")
	}

	return exists, nil
}
diff --git a/models/assays.go b/models/assays.go
new file mode 100644
index 0000000..8ffba3c
--- /dev/null
+++ b/models/assays.go
@@ -0,0 +1,3967 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// Assay is an object representing the database table.
type Assay struct {
	AssayID int64 `boil:"assay_id" json:"assay_id" toml:"assay_id" yaml:"assay_id"`
	DocID int64 `boil:"doc_id" json:"doc_id" toml:"doc_id" yaml:"doc_id"`
	Description null.String `boil:"description" json:"description,omitempty" toml:"description" yaml:"description,omitempty"`
	AssayType null.String `boil:"assay_type" json:"assay_type,omitempty" toml:"assay_type" yaml:"assay_type,omitempty"`
	AssayTestType null.String `boil:"assay_test_type" json:"assay_test_type,omitempty" toml:"assay_test_type" yaml:"assay_test_type,omitempty"`
	AssayCategory null.String `boil:"assay_category" json:"assay_category,omitempty" toml:"assay_category" yaml:"assay_category,omitempty"`
	AssayOrganism null.String `boil:"assay_organism" json:"assay_organism,omitempty" toml:"assay_organism" yaml:"assay_organism,omitempty"`
	AssayTaxID null.Int64 `boil:"assay_tax_id" json:"assay_tax_id,omitempty" toml:"assay_tax_id" yaml:"assay_tax_id,omitempty"`
	AssayStrain null.String `boil:"assay_strain" json:"assay_strain,omitempty" toml:"assay_strain" yaml:"assay_strain,omitempty"`
	AssayTissue null.String `boil:"assay_tissue" json:"assay_tissue,omitempty" toml:"assay_tissue" yaml:"assay_tissue,omitempty"`
	AssayCellType null.String `boil:"assay_cell_type" json:"assay_cell_type,omitempty" toml:"assay_cell_type" yaml:"assay_cell_type,omitempty"`
	AssaySubcellularFraction null.String `boil:"assay_subcellular_fraction" json:"assay_subcellular_fraction,omitempty" toml:"assay_subcellular_fraction" yaml:"assay_subcellular_fraction,omitempty"`
	Tid null.Int64 `boil:"tid" json:"tid,omitempty" toml:"tid" yaml:"tid,omitempty"`
	RelationshipType null.String `boil:"relationship_type" json:"relationship_type,omitempty" toml:"relationship_type" yaml:"relationship_type,omitempty"`
	ConfidenceScore null.Int16 `boil:"confidence_score" json:"confidence_score,omitempty" toml:"confidence_score" yaml:"confidence_score,omitempty"`
	CuratedBy null.String `boil:"curated_by" json:"curated_by,omitempty" toml:"curated_by" yaml:"curated_by,omitempty"`
	SRCID int64 `boil:"src_id" json:"src_id" toml:"src_id" yaml:"src_id"`
	SRCAssayID null.String `boil:"src_assay_id" json:"src_assay_id,omitempty" toml:"src_assay_id" yaml:"src_assay_id,omitempty"`
	ChemblID string `boil:"chembl_id" json:"chembl_id" toml:"chembl_id" yaml:"chembl_id"`
	CellID null.Int64 `boil:"cell_id" json:"cell_id,omitempty" toml:"cell_id" yaml:"cell_id,omitempty"`
	BaoFormat null.String `boil:"bao_format" json:"bao_format,omitempty" toml:"bao_format" yaml:"bao_format,omitempty"`
	TissueID null.Int64 `boil:"tissue_id" json:"tissue_id,omitempty" toml:"tissue_id" yaml:"tissue_id,omitempty"`
	VariantID null.Int64 `boil:"variant_id" json:"variant_id,omitempty" toml:"variant_id" yaml:"variant_id,omitempty"`
	Aidx string `boil:"aidx" json:"aidx" toml:"aidx" yaml:"aidx"`

	// R holds eager-loaded relationships; L holds the Load* helpers.
	R *assayR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L assayL `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// AssayColumns maps struct field names to their bare database column names.
var AssayColumns = struct {
	AssayID string
	DocID string
	Description string
	AssayType string
	AssayTestType string
	AssayCategory string
	AssayOrganism string
	AssayTaxID string
	AssayStrain string
	AssayTissue string
	AssayCellType string
	AssaySubcellularFraction string
	Tid string
	RelationshipType string
	ConfidenceScore string
	CuratedBy string
	SRCID string
	SRCAssayID string
	ChemblID string
	CellID string
	BaoFormat string
	TissueID string
	VariantID string
	Aidx string
}{
	AssayID: "assay_id",
	DocID: "doc_id",
	Description: "description",
	AssayType: "assay_type",
	AssayTestType: "assay_test_type",
	AssayCategory: "assay_category",
	AssayOrganism: "assay_organism",
	AssayTaxID: "assay_tax_id",
	AssayStrain: "assay_strain",
	AssayTissue: "assay_tissue",
	AssayCellType: "assay_cell_type",
	AssaySubcellularFraction: "assay_subcellular_fraction",
	Tid: "tid",
	RelationshipType: "relationship_type",
	ConfidenceScore: "confidence_score",
	CuratedBy: "curated_by",
	SRCID: "src_id",
	SRCAssayID: "src_assay_id",
	ChemblID: "chembl_id",
	CellID: "cell_id",
	BaoFormat: "bao_format",
	TissueID: "tissue_id",
	VariantID: "variant_id",
	Aidx: "aidx",
}

// AssayTableColumns maps struct field names to table-qualified column names.
var AssayTableColumns = struct {
	AssayID string
	DocID string
	Description string
	AssayType string
	AssayTestType string
	AssayCategory string
	AssayOrganism string
	AssayTaxID string
	AssayStrain string
	AssayTissue string
	AssayCellType string
	AssaySubcellularFraction string
	Tid string
	RelationshipType string
	ConfidenceScore string
	CuratedBy string
	SRCID string
	SRCAssayID string
	ChemblID string
	CellID string
	BaoFormat string
	TissueID string
	VariantID string
	Aidx string
}{
	AssayID: "assays.assay_id",
	DocID: "assays.doc_id",
	Description: "assays.description",
	AssayType: "assays.assay_type",
	AssayTestType: "assays.assay_test_type",
	AssayCategory: "assays.assay_category",
	AssayOrganism: "assays.assay_organism",
	AssayTaxID: "assays.assay_tax_id",
	AssayStrain: "assays.assay_strain",
	AssayTissue: "assays.assay_tissue",
	AssayCellType: "assays.assay_cell_type",
	AssaySubcellularFraction: "assays.assay_subcellular_fraction",
	Tid: "assays.tid",
	RelationshipType: "assays.relationship_type",
	ConfidenceScore: "assays.confidence_score",
	CuratedBy: "assays.curated_by",
	SRCID: "assays.src_id",
	SRCAssayID: "assays.src_assay_id",
	ChemblID: "assays.chembl_id",
	CellID: "assays.cell_id",
	BaoFormat: "assays.bao_format",
	TissueID: "assays.tissue_id",
	VariantID: "assays.variant_id",
	Aidx: "assays.aidx",
}

// Generated where

var AssayWhere = struct {
	AssayID whereHelperint64
	DocID whereHelperint64
	Description whereHelpernull_String
	AssayType whereHelpernull_String
	AssayTestType whereHelpernull_String
	AssayCategory whereHelpernull_String
	AssayOrganism whereHelpernull_String
	AssayTaxID whereHelpernull_Int64
	AssayStrain whereHelpernull_String
	AssayTissue whereHelpernull_String
	AssayCellType whereHelpernull_String
	AssaySubcellularFraction whereHelpernull_String
	Tid whereHelpernull_Int64
	RelationshipType whereHelpernull_String
	ConfidenceScore whereHelpernull_Int16
	CuratedBy whereHelpernull_String
	SRCID whereHelperint64
	SRCAssayID whereHelpernull_String
	ChemblID whereHelperstring
	CellID whereHelpernull_Int64
	BaoFormat whereHelpernull_String
	TissueID whereHelpernull_Int64
	VariantID whereHelpernull_Int64
	Aidx whereHelperstring
}{
	AssayID: whereHelperint64{field: "\"assays\".\"assay_id\""},
	DocID: whereHelperint64{field: "\"assays\".\"doc_id\""},
	Description: whereHelpernull_String{field: "\"assays\".\"description\""},
	AssayType: whereHelpernull_String{field: "\"assays\".\"assay_type\""},
	AssayTestType: whereHelpernull_String{field: "\"assays\".\"assay_test_type\""},
	AssayCategory: whereHelpernull_String{field: "\"assays\".\"assay_category\""},
	AssayOrganism: whereHelpernull_String{field: "\"assays\".\"assay_organism\""},
	AssayTaxID: whereHelpernull_Int64{field: "\"assays\".\"assay_tax_id\""},
	AssayStrain: whereHelpernull_String{field: "\"assays\".\"assay_strain\""},
	AssayTissue: whereHelpernull_String{field: "\"assays\".\"assay_tissue\""},
	AssayCellType: whereHelpernull_String{field: "\"assays\".\"assay_cell_type\""},
	AssaySubcellularFraction: whereHelpernull_String{field: "\"assays\".\"assay_subcellular_fraction\""},
	Tid: whereHelpernull_Int64{field: "\"assays\".\"tid\""},
	RelationshipType: whereHelpernull_String{field: "\"assays\".\"relationship_type\""},
	ConfidenceScore: whereHelpernull_Int16{field: "\"assays\".\"confidence_score\""},
	CuratedBy: whereHelpernull_String{field: "\"assays\".\"curated_by\""},
	SRCID: whereHelperint64{field: "\"assays\".\"src_id\""},
	SRCAssayID: whereHelpernull_String{field: "\"assays\".\"src_assay_id\""},
	ChemblID: whereHelperstring{field: "\"assays\".\"chembl_id\""},
	CellID: whereHelpernull_Int64{field: "\"assays\".\"cell_id\""},
	BaoFormat: whereHelpernull_String{field: "\"assays\".\"bao_format\""},
	TissueID: whereHelpernull_Int64{field: "\"assays\".\"tissue_id\""},
	VariantID: whereHelpernull_Int64{field: "\"assays\".\"variant_id\""},
	Aidx: whereHelperstring{field: "\"assays\".\"aidx\""},
}

// AssayRels is where relationship names are stored.
var AssayRels = struct {
	BaoFormatBioassayOntology string
	Variant string
	Tissue string
	TidTargetDictionary string
	SRC string
	AssayRelationshipType string
	Doc string
	CuratedByCurationLookup string
	ConfidenceScoreConfidenceScoreLookup string
	Chembl string
	Cell string
	AssayAssayType string
	Activities string
	AssayClassMaps string
	AssayParameters string
}{
	BaoFormatBioassayOntology: "BaoFormatBioassayOntology",
	Variant: "Variant",
	Tissue: "Tissue",
	TidTargetDictionary: "TidTargetDictionary",
	SRC: "SRC",
	AssayRelationshipType: "AssayRelationshipType",
	Doc: "Doc",
	CuratedByCurationLookup: "CuratedByCurationLookup",
	ConfidenceScoreConfidenceScoreLookup: "ConfidenceScoreConfidenceScoreLookup",
	Chembl: "Chembl",
	Cell: "Cell",
	AssayAssayType: "AssayAssayType",
	Activities: "Activities",
	AssayClassMaps: "AssayClassMaps",
	AssayParameters: "AssayParameters",
}

// assayR is where relationships are stored.
type assayR struct {
	BaoFormatBioassayOntology *BioassayOntology `boil:"BaoFormatBioassayOntology" json:"BaoFormatBioassayOntology" toml:"BaoFormatBioassayOntology" yaml:"BaoFormatBioassayOntology"`
	Variant *VariantSequence `boil:"Variant" json:"Variant" toml:"Variant" yaml:"Variant"`
	Tissue *TissueDictionary `boil:"Tissue" json:"Tissue" toml:"Tissue" yaml:"Tissue"`
	TidTargetDictionary *TargetDictionary `boil:"TidTargetDictionary" json:"TidTargetDictionary" toml:"TidTargetDictionary" yaml:"TidTargetDictionary"`
	SRC *Source `boil:"SRC" json:"SRC" toml:"SRC" yaml:"SRC"`
	AssayRelationshipType *RelationshipType `boil:"AssayRelationshipType" json:"AssayRelationshipType" toml:"AssayRelationshipType" yaml:"AssayRelationshipType"`
	Doc *Doc `boil:"Doc" json:"Doc" toml:"Doc" yaml:"Doc"`
	CuratedByCurationLookup *CurationLookup `boil:"CuratedByCurationLookup" json:"CuratedByCurationLookup" toml:"CuratedByCurationLookup" yaml:"CuratedByCurationLookup"`
	ConfidenceScoreConfidenceScoreLookup *ConfidenceScoreLookup `boil:"ConfidenceScoreConfidenceScoreLookup" json:"ConfidenceScoreConfidenceScoreLookup" toml:"ConfidenceScoreConfidenceScoreLookup" yaml:"ConfidenceScoreConfidenceScoreLookup"`
	Chembl *ChemblIDLookup `boil:"Chembl" json:"Chembl" toml:"Chembl" yaml:"Chembl"`
	Cell *CellDictionary `boil:"Cell" json:"Cell" toml:"Cell" yaml:"Cell"`
	AssayAssayType *AssayType `boil:"AssayAssayType" json:"AssayAssayType" toml:"AssayAssayType" yaml:"AssayAssayType"`
	Activities ActivitySlice `boil:"Activities" json:"Activities" toml:"Activities" yaml:"Activities"`
	AssayClassMaps AssayClassMapSlice `boil:"AssayClassMaps" json:"AssayClassMaps" toml:"AssayClassMaps" yaml:"AssayClassMaps"`
	AssayParameters AssayParameterSlice `boil:"AssayParameters" json:"AssayParameters" toml:"AssayParameters" yaml:"AssayParameters"`
}

// NewStruct creates a new relationship struct
func (*assayR) NewStruct() *assayR {
	return &assayR{}
}

// The Get* accessors below are nil-safe: they return the zero value when
// the relationship struct itself has not been allocated yet.

func (r *assayR) GetBaoFormatBioassayOntology() *BioassayOntology {
	if r == nil {
		return nil
	}
	return r.BaoFormatBioassayOntology
}

func (r *assayR) GetVariant() *VariantSequence {
	if r == nil {
		return nil
	}
	return r.Variant
}

func (r *assayR) GetTissue() *TissueDictionary {
	if r == nil {
		return nil
	}
	return r.Tissue
}

func (r *assayR) GetTidTargetDictionary() *TargetDictionary {
	if r == nil {
		return nil
	}
	return r.TidTargetDictionary
}

func (r *assayR) GetSRC() *Source {
	if r == nil {
		return nil
	}
	return r.SRC
}

func (r *assayR) GetAssayRelationshipType() *RelationshipType {
	if r == nil {
		return nil
	}
	return r.AssayRelationshipType
}

func (r *assayR) GetDoc() *Doc {
	if r == nil {
		return nil
	}
	return r.Doc
}

func (r *assayR) GetCuratedByCurationLookup() *CurationLookup {
	if r == nil {
		return nil
	}
	return r.CuratedByCurationLookup
}

func (r *assayR) GetConfidenceScoreConfidenceScoreLookup() *ConfidenceScoreLookup {
	if r == nil {
		return nil
	}
	return r.ConfidenceScoreConfidenceScoreLookup
}

func (r *assayR) GetChembl() *ChemblIDLookup {
	if r == nil {
		return nil
	}
	return r.Chembl
}

func (r *assayR) GetCell() *CellDictionary {
	if r == nil {
		return nil
	}
	return r.Cell
}

func (r *assayR) GetAssayAssayType() *AssayType {
	if r == nil {
		return nil
	}
	return r.AssayAssayType
}

func (r *assayR) GetActivities() ActivitySlice {
	if r == nil {
		return nil
	}
	return r.Activities
}

func (r *assayR) GetAssayClassMaps() AssayClassMapSlice {
	if r == nil {
		return nil
	}
	return r.AssayClassMaps
}

func (r *assayR) GetAssayParameters() AssayParameterSlice {
	if r == nil {
		return nil
	}
	return r.AssayParameters
}

// assayL is where Load methods for each relationship are stored.
type assayL struct{}

// Column metadata for the assays table, used by the generated query builders.
var (
	assayAllColumns            = []string{"assay_id", "doc_id", "description", "assay_type", "assay_test_type", "assay_category", "assay_organism", "assay_tax_id", "assay_strain", "assay_tissue", "assay_cell_type", "assay_subcellular_fraction", "tid", "relationship_type", "confidence_score", "curated_by", "src_id", "src_assay_id", "chembl_id", "cell_id", "bao_format", "tissue_id", "variant_id", "aidx"}
	assayColumnsWithoutDefault = []string{"assay_id", "doc_id", "src_id", "chembl_id", "aidx"}
	assayColumnsWithDefault    = []string{"description", "assay_type", "assay_test_type", "assay_category", "assay_organism", "assay_tax_id", "assay_strain", "assay_tissue", "assay_cell_type", "assay_subcellular_fraction", "tid", "relationship_type", "confidence_score", "curated_by", "src_assay_id", "cell_id", "bao_format", "tissue_id", "variant_id"}
	assayPrimaryKeyColumns     = []string{"assay_id"}
	assayGeneratedColumns      = []string{}
)

type (
	// AssaySlice is an alias for a slice of pointers to Assay.
	// This should almost always be used instead of []Assay.
	AssaySlice []*Assay
	// AssayHook is the signature for custom Assay hook methods
	AssayHook func(context.Context, boil.ContextExecutor, *Assay) error

	// assayQuery wraps queries.Query so finisher methods (One, All, Count,
	// Exists) are available on assay-specific queries.
	assayQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	assayType                 = reflect.TypeOf(&Assay{})
	assayMapping              = queries.MakeStructMapping(assayType)
	assayPrimaryKeyMapping, _ = queries.BindMapping(assayType, assayMapping, assayPrimaryKeyColumns)
	assayInsertCacheMut       sync.RWMutex
	assayInsertCache          = make(map[string]insertCache)
	assayUpdateCacheMut       sync.RWMutex
	assayUpdateCache          = make(map[string]updateCache)
	assayUpsertCacheMut       sync.RWMutex
	assayUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Registered hooks, one slice per hook point; AddAssayHook appends to these
// and the do*Hooks methods run them in registration order.
var assayAfterSelectHooks []AssayHook

var assayBeforeInsertHooks []AssayHook
var assayAfterInsertHooks []AssayHook

var assayBeforeUpdateHooks []AssayHook
var assayAfterUpdateHooks []AssayHook

var assayBeforeDeleteHooks []AssayHook
var assayAfterDeleteHooks []AssayHook

var assayBeforeUpsertHooks []AssayHook
var assayAfterUpsertHooks []AssayHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *Assay) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be disabled per-context via boil.SkipHooks.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range assayAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *Assay) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range assayBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *Assay) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range assayAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *Assay) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range assayBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
+func (o *Assay) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Assay) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *Assay) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Assay) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *Assay) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range assayAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddAssayHook registers your hook function for all future operations. 
+func AddAssayHook(hookPoint boil.HookPoint, assayHook AssayHook) { + switch hookPoint { + case boil.AfterSelectHook: + assayAfterSelectHooks = append(assayAfterSelectHooks, assayHook) + case boil.BeforeInsertHook: + assayBeforeInsertHooks = append(assayBeforeInsertHooks, assayHook) + case boil.AfterInsertHook: + assayAfterInsertHooks = append(assayAfterInsertHooks, assayHook) + case boil.BeforeUpdateHook: + assayBeforeUpdateHooks = append(assayBeforeUpdateHooks, assayHook) + case boil.AfterUpdateHook: + assayAfterUpdateHooks = append(assayAfterUpdateHooks, assayHook) + case boil.BeforeDeleteHook: + assayBeforeDeleteHooks = append(assayBeforeDeleteHooks, assayHook) + case boil.AfterDeleteHook: + assayAfterDeleteHooks = append(assayAfterDeleteHooks, assayHook) + case boil.BeforeUpsertHook: + assayBeforeUpsertHooks = append(assayBeforeUpsertHooks, assayHook) + case boil.AfterUpsertHook: + assayAfterUpsertHooks = append(assayAfterUpsertHooks, assayHook) + } +} + +// One returns a single assay record from the query. +func (q assayQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Assay, error) { + o := &Assay{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for assays") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Assay records from the query. 
+func (q assayQuery) All(ctx context.Context, exec boil.ContextExecutor) (AssaySlice, error) { + var o []*Assay + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to Assay slice") + } + + if len(assayAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Assay records in the query. +func (q assayQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count assays rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q assayQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if assays exists") + } + + return count > 0, nil +} + +// BaoFormatBioassayOntology pointed to by the foreign key. +func (o *Assay) BaoFormatBioassayOntology(mods ...qm.QueryMod) bioassayOntologyQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"bao_id\" = ?", o.BaoFormat), + } + + queryMods = append(queryMods, mods...) + + return BioassayOntologies(queryMods...) +} + +// Variant pointed to by the foreign key. +func (o *Assay) Variant(mods ...qm.QueryMod) variantSequenceQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"variant_id\" = ?", o.VariantID), + } + + queryMods = append(queryMods, mods...) + + return VariantSequences(queryMods...) +} + +// Tissue pointed to by the foreign key. 
// Tissue pointed to by the foreign key.
func (o *Assay) Tissue(mods ...qm.QueryMod) tissueDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"tissue_id\" = ?", o.TissueID),
	}

	queryMods = append(queryMods, mods...)

	return TissueDictionaries(queryMods...)
}

// TidTargetDictionary pointed to by the foreign key.
func (o *Assay) TidTargetDictionary(mods ...qm.QueryMod) targetDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"tid\" = ?", o.Tid),
	}

	queryMods = append(queryMods, mods...)

	return TargetDictionaries(queryMods...)
}

// SRC pointed to by the foreign key.
func (o *Assay) SRC(mods ...qm.QueryMod) sourceQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"src_id\" = ?", o.SRCID),
	}

	queryMods = append(queryMods, mods...)

	return Sources(queryMods...)
}

// AssayRelationshipType pointed to by the foreign key.
func (o *Assay) AssayRelationshipType(mods ...qm.QueryMod) relationshipTypeQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"relationship_type\" = ?", o.RelationshipType),
	}

	queryMods = append(queryMods, mods...)

	return RelationshipTypes(queryMods...)
}

// Doc pointed to by the foreign key.
func (o *Assay) Doc(mods ...qm.QueryMod) docQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"doc_id\" = ?", o.DocID),
	}

	queryMods = append(queryMods, mods...)

	return Docs(queryMods...)
}

// CuratedByCurationLookup pointed to by the foreign key.
func (o *Assay) CuratedByCurationLookup(mods ...qm.QueryMod) curationLookupQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"curated_by\" = ?", o.CuratedBy),
	}

	queryMods = append(queryMods, mods...)

	return CurationLookups(queryMods...)
}

// ConfidenceScoreConfidenceScoreLookup pointed to by the foreign key.
func (o *Assay) ConfidenceScoreConfidenceScoreLookup(mods ...qm.QueryMod) confidenceScoreLookupQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"confidence_score\" = ?", o.ConfidenceScore),
	}

	queryMods = append(queryMods, mods...)

	return ConfidenceScoreLookups(queryMods...)
}

// Chembl pointed to by the foreign key.
func (o *Assay) Chembl(mods ...qm.QueryMod) chemblIDLookupQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"chembl_id\" = ?", o.ChemblID),
	}

	queryMods = append(queryMods, mods...)

	return ChemblIDLookups(queryMods...)
}

// Cell pointed to by the foreign key.
func (o *Assay) Cell(mods ...qm.QueryMod) cellDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"cell_id\" = ?", o.CellID),
	}

	queryMods = append(queryMods, mods...)

	return CellDictionaries(queryMods...)
}

// AssayAssayType pointed to by the foreign key.
func (o *Assay) AssayAssayType(mods ...qm.QueryMod) assayTypeQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"assay_type\" = ?", o.AssayType),
	}

	queryMods = append(queryMods, mods...)

	return AssayTypes(queryMods...)
}

// Activities retrieves all the activity's Activities with an executor.
func (o *Assay) Activities(mods ...qm.QueryMod) activityQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"activities\".\"assay_id\"=?", o.AssayID),
	)

	return Activities(queryMods...)
}

// AssayClassMaps retrieves all the assay_class_map's AssayClassMaps with an executor.
func (o *Assay) AssayClassMaps(mods ...qm.QueryMod) assayClassMapQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"assay_class_map\".\"assay_id\"=?", o.AssayID),
	)

	return AssayClassMaps(queryMods...)
}

// AssayParameters retrieves all the assay_parameter's AssayParameters with an executor.
func (o *Assay) AssayParameters(mods ...qm.QueryMod) assayParameterQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"assay_parameters\".\"assay_id\"=?", o.AssayID),
	)

	return AssayParameters(queryMods...)
}

// LoadBaoFormatBioassayOntology allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (assayL) LoadBaoFormatBioassayOntology(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error {
	var slice []*Assay
	var object *Assay

	if singular {
		object = maybeAssay.(*Assay)
	} else {
		slice = *maybeAssay.(*[]*Assay)
	}

	// Collect distinct, non-NULL foreign-key values to fetch in one query.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &assayR{}
		}
		if !queries.IsNil(object.BaoFormat) {
			args = append(args, object.BaoFormat)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &assayR{}
			}

			// Skip keys already queued (dedup).
			for _, a := range args {
				if queries.Equal(a, obj.BaoFormat) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.BaoFormat) {
				args = append(args, obj.BaoFormat)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`bioassay_ontology`),
		qm.WhereIn(`bioassay_ontology.bao_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load BioassayOntology")
	}

	var resultSlice []*BioassayOntology
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice BioassayOntology")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for bioassay_ontology")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for bioassay_ontology")
	}

	if len(assayAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Attach each loaded foreign row to its local row(s), and back-link.
	if singular {
		foreign := resultSlice[0]
		object.R.BaoFormatBioassayOntology = foreign
		if foreign.R == nil {
			foreign.R = &bioassayOntologyR{}
		}
		foreign.R.BaoFormatAssays = append(foreign.R.BaoFormatAssays, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.BaoFormat, foreign.BaoID) {
				local.R.BaoFormatBioassayOntology = foreign
				if foreign.R == nil {
					foreign.R = &bioassayOntologyR{}
				}
				foreign.R.BaoFormatAssays = append(foreign.R.BaoFormatAssays, local)
				break
			}
		}
	}

	return nil
}

// LoadVariant allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (assayL) LoadVariant(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error {
	var slice []*Assay
	var object *Assay

	if singular {
		object = maybeAssay.(*Assay)
	} else {
		slice = *maybeAssay.(*[]*Assay)
	}

	// Collect distinct, non-NULL foreign-key values to fetch in one query.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &assayR{}
		}
		if !queries.IsNil(object.VariantID) {
			args = append(args, object.VariantID)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &assayR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.VariantID) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.VariantID) {
				args = append(args, obj.VariantID)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`variant_sequences`),
		qm.WhereIn(`variant_sequences.variant_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load VariantSequence")
	}

	var resultSlice []*VariantSequence
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice VariantSequence")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for variant_sequences")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for variant_sequences")
	}

	if len(assayAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.Variant = foreign
		if foreign.R == nil {
			foreign.R = &variantSequenceR{}
		}
		foreign.R.VariantAssays = append(foreign.R.VariantAssays, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.VariantID, foreign.VariantID) {
				local.R.Variant = foreign
				if foreign.R == nil {
					foreign.R = &variantSequenceR{}
				}
				foreign.R.VariantAssays = append(foreign.R.VariantAssays, local)
				break
			}
		}
	}

	return nil
}

// LoadTissue allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
// LoadTissue allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (assayL) LoadTissue(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error {
	var slice []*Assay
	var object *Assay

	if singular {
		object = maybeAssay.(*Assay)
	} else {
		slice = *maybeAssay.(*[]*Assay)
	}

	// Collect distinct, non-NULL foreign-key values to fetch in one query.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &assayR{}
		}
		if !queries.IsNil(object.TissueID) {
			args = append(args, object.TissueID)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &assayR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.TissueID) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.TissueID) {
				args = append(args, obj.TissueID)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`tissue_dictionary`),
		qm.WhereIn(`tissue_dictionary.tissue_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load TissueDictionary")
	}

	var resultSlice []*TissueDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice TissueDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for tissue_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for tissue_dictionary")
	}

	if len(assayAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.Tissue = foreign
		if foreign.R == nil {
			foreign.R = &tissueDictionaryR{}
		}
		foreign.R.TissueAssays = append(foreign.R.TissueAssays, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.TissueID, foreign.TissueID) {
				local.R.Tissue = foreign
				if foreign.R == nil {
					foreign.R = &tissueDictionaryR{}
				}
				foreign.R.TissueAssays = append(foreign.R.TissueAssays, local)
				break
			}
		}
	}

	return nil
}

// LoadTidTargetDictionary allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (assayL) LoadTidTargetDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error {
	var slice []*Assay
	var object *Assay

	if singular {
		object = maybeAssay.(*Assay)
	} else {
		slice = *maybeAssay.(*[]*Assay)
	}

	// Collect distinct, non-NULL foreign-key values to fetch in one query.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &assayR{}
		}
		if !queries.IsNil(object.Tid) {
			args = append(args, object.Tid)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &assayR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.Tid) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.Tid) {
				args = append(args, obj.Tid)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`target_dictionary`),
		qm.WhereIn(`target_dictionary.tid in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load TargetDictionary")
	}

	var resultSlice []*TargetDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice TargetDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for target_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for target_dictionary")
	}

	if len(assayAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.TidTargetDictionary = foreign
		if foreign.R == nil {
			foreign.R = &targetDictionaryR{}
		}
		foreign.R.TidAssays = append(foreign.R.TidAssays, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.Tid, foreign.Tid) {
				local.R.TidTargetDictionary = foreign
				if foreign.R == nil {
					foreign.R = &targetDictionaryR{}
				}
				foreign.R.TidAssays = append(foreign.R.TidAssays, local)
				break
			}
		}
	}

	return nil
}

// LoadSRC allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (assayL) LoadSRC(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error {
	var slice []*Assay
	var object *Assay

	if singular {
		object = maybeAssay.(*Assay)
	} else {
		slice = *maybeAssay.(*[]*Assay)
	}

	// src_id is non-nullable here, so keys are compared with == and no
	// queries.IsNil guard is generated.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &assayR{}
		}
		args = append(args, object.SRCID)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &assayR{}
			}

			for _, a := range args {
				if a == obj.SRCID {
					continue Outer
				}
			}

			args = append(args, obj.SRCID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`source`),
		qm.WhereIn(`source.src_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load Source")
	}

	var resultSlice []*Source
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice Source")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for source")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for source")
	}

	if len(assayAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.SRC = foreign
		if foreign.R == nil {
			foreign.R = &sourceR{}
		}
		foreign.R.SRCAssays = append(foreign.R.SRCAssays, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.SRCID == foreign.SRCID {
				local.R.SRC = foreign
				if foreign.R == nil {
					foreign.R = &sourceR{}
				}
				foreign.R.SRCAssays = append(foreign.R.SRCAssays, local)
				break
			}
		}
	}

	return nil
}

// LoadAssayRelationshipType allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
// LoadAssayRelationshipType allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (assayL) LoadAssayRelationshipType(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error {
	var slice []*Assay
	var object *Assay

	if singular {
		object = maybeAssay.(*Assay)
	} else {
		slice = *maybeAssay.(*[]*Assay)
	}

	// Collect distinct, non-NULL foreign-key values to fetch in one query.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &assayR{}
		}
		if !queries.IsNil(object.RelationshipType) {
			args = append(args, object.RelationshipType)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &assayR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.RelationshipType) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.RelationshipType) {
				args = append(args, obj.RelationshipType)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`relationship_type`),
		qm.WhereIn(`relationship_type.relationship_type in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load RelationshipType")
	}

	var resultSlice []*RelationshipType
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice RelationshipType")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for relationship_type")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for relationship_type")
	}

	if len(assayAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.AssayRelationshipType = foreign
		if foreign.R == nil {
			foreign.R = &relationshipTypeR{}
		}
		foreign.R.Assays = append(foreign.R.Assays, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.RelationshipType, foreign.RelationshipType) {
				local.R.AssayRelationshipType = foreign
				if foreign.R == nil {
					foreign.R = &relationshipTypeR{}
				}
				foreign.R.Assays = append(foreign.R.Assays, local)
				break
			}
		}
	}

	return nil
}

// LoadDoc allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (assayL) LoadDoc(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error {
	var slice []*Assay
	var object *Assay

	if singular {
		object = maybeAssay.(*Assay)
	} else {
		slice = *maybeAssay.(*[]*Assay)
	}

	// doc_id is non-nullable here, so keys are compared with == and no
	// queries.IsNil guard is generated.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &assayR{}
		}
		args = append(args, object.DocID)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &assayR{}
			}

			for _, a := range args {
				if a == obj.DocID {
					continue Outer
				}
			}

			args = append(args, obj.DocID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`docs`),
		qm.WhereIn(`docs.doc_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load Doc")
	}

	var resultSlice []*Doc
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice Doc")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for docs")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for docs")
	}

	if len(assayAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.Doc = foreign
		if foreign.R == nil {
			foreign.R = &docR{}
		}
		foreign.R.Assays = append(foreign.R.Assays, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.DocID == foreign.DocID {
				local.R.Doc = foreign
				if foreign.R == nil {
					foreign.R = &docR{}
				}
				foreign.R.Assays = append(foreign.R.Assays, local)
				break
			}
		}
	}

	return nil
}

// LoadCuratedByCurationLookup allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (assayL) LoadCuratedByCurationLookup(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error {
	var slice []*Assay
	var object *Assay

	if singular {
		object = maybeAssay.(*Assay)
	} else {
		slice = *maybeAssay.(*[]*Assay)
	}

	// Collect distinct, non-NULL foreign-key values to fetch in one query.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &assayR{}
		}
		if !queries.IsNil(object.CuratedBy) {
			args = append(args, object.CuratedBy)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &assayR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.CuratedBy) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.CuratedBy) {
				args = append(args, obj.CuratedBy)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`curation_lookup`),
		qm.WhereIn(`curation_lookup.curated_by in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load CurationLookup")
	}

	var resultSlice []*CurationLookup
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice CurationLookup")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for curation_lookup")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for curation_lookup")
	}

	if len(assayAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.CuratedByCurationLookup = foreign
		if foreign.R == nil {
			foreign.R = &curationLookupR{}
		}
		foreign.R.CuratedByAssays = append(foreign.R.CuratedByAssays, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.CuratedBy, foreign.CuratedBy) {
				local.R.CuratedByCurationLookup = foreign
				if foreign.R == nil {
					foreign.R = &curationLookupR{}
				}
				foreign.R.CuratedByAssays = append(foreign.R.CuratedByAssays, local)
				break
			}
		}
	}

	return nil
}

// LoadConfidenceScoreConfidenceScoreLookup allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
+func (assayL) LoadConfidenceScoreConfidenceScoreLookup(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error { + var slice []*Assay + var object *Assay + + if singular { + object = maybeAssay.(*Assay) + } else { + slice = *maybeAssay.(*[]*Assay) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &assayR{} + } + if !queries.IsNil(object.ConfidenceScore) { + args = append(args, object.ConfidenceScore) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &assayR{} + } + + for _, a := range args { + if queries.Equal(a, obj.ConfidenceScore) { + continue Outer + } + } + + if !queries.IsNil(obj.ConfidenceScore) { + args = append(args, obj.ConfidenceScore) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`confidence_score_lookup`), + qm.WhereIn(`confidence_score_lookup.confidence_score in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ConfidenceScoreLookup") + } + + var resultSlice []*ConfidenceScoreLookup + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice ConfidenceScoreLookup") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for confidence_score_lookup") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for confidence_score_lookup") + } + + if len(assayAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.ConfidenceScoreConfidenceScoreLookup = foreign + if foreign.R == nil 
{ + foreign.R = &confidenceScoreLookupR{} + } + foreign.R.ConfidenceScoreAssays = append(foreign.R.ConfidenceScoreAssays, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.ConfidenceScore, foreign.ConfidenceScore) { + local.R.ConfidenceScoreConfidenceScoreLookup = foreign + if foreign.R == nil { + foreign.R = &confidenceScoreLookupR{} + } + foreign.R.ConfidenceScoreAssays = append(foreign.R.ConfidenceScoreAssays, local) + break + } + } + } + + return nil +} + +// LoadChembl allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (assayL) LoadChembl(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error { + var slice []*Assay + var object *Assay + + if singular { + object = maybeAssay.(*Assay) + } else { + slice = *maybeAssay.(*[]*Assay) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &assayR{} + } + args = append(args, object.ChemblID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &assayR{} + } + + for _, a := range args { + if a == obj.ChemblID { + continue Outer + } + } + + args = append(args, obj.ChemblID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`chembl_id_lookup`), + qm.WhereIn(`chembl_id_lookup.chembl_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ChemblIDLookup") + } + + var resultSlice []*ChemblIDLookup + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice ChemblIDLookup") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for chembl_id_lookup") + } + if err = results.Err(); err 
!= nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for chembl_id_lookup") + } + + if len(assayAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Chembl = foreign + if foreign.R == nil { + foreign.R = &chemblIDLookupR{} + } + foreign.R.ChemblAssay = object + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ChemblID == foreign.ChemblID { + local.R.Chembl = foreign + if foreign.R == nil { + foreign.R = &chemblIDLookupR{} + } + foreign.R.ChemblAssay = local + break + } + } + } + + return nil +} + +// LoadCell allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (assayL) LoadCell(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error { + var slice []*Assay + var object *Assay + + if singular { + object = maybeAssay.(*Assay) + } else { + slice = *maybeAssay.(*[]*Assay) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &assayR{} + } + if !queries.IsNil(object.CellID) { + args = append(args, object.CellID) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &assayR{} + } + + for _, a := range args { + if queries.Equal(a, obj.CellID) { + continue Outer + } + } + + if !queries.IsNil(obj.CellID) { + args = append(args, obj.CellID) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`cell_dictionary`), + qm.WhereIn(`cell_dictionary.cell_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load CellDictionary") + } + + var 
resultSlice []*CellDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice CellDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for cell_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for cell_dictionary") + } + + if len(assayAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Cell = foreign + if foreign.R == nil { + foreign.R = &cellDictionaryR{} + } + foreign.R.CellAssays = append(foreign.R.CellAssays, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.CellID, foreign.CellID) { + local.R.Cell = foreign + if foreign.R == nil { + foreign.R = &cellDictionaryR{} + } + foreign.R.CellAssays = append(foreign.R.CellAssays, local) + break + } + } + } + + return nil +} + +// LoadAssayAssayType allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (assayL) LoadAssayAssayType(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error { + var slice []*Assay + var object *Assay + + if singular { + object = maybeAssay.(*Assay) + } else { + slice = *maybeAssay.(*[]*Assay) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &assayR{} + } + if !queries.IsNil(object.AssayType) { + args = append(args, object.AssayType) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &assayR{} + } + + for _, a := range args { + if queries.Equal(a, obj.AssayType) { + continue Outer + } + } + + if !queries.IsNil(obj.AssayType) { + args = append(args, obj.AssayType) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assay_type`), + qm.WhereIn(`assay_type.assay_type in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load AssayType") + } + + var resultSlice []*AssayType + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice AssayType") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for assay_type") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assay_type") + } + + if len(assayAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.AssayAssayType = foreign + if foreign.R == nil { + foreign.R = &assayTypeR{} + } + foreign.R.Assays = append(foreign.R.Assays, object) + return nil + } + + for _, local := range slice { + for _, foreign := range 
resultSlice { + if queries.Equal(local.AssayType, foreign.AssayType) { + local.R.AssayAssayType = foreign + if foreign.R == nil { + foreign.R = &assayTypeR{} + } + foreign.R.Assays = append(foreign.R.Assays, local) + break + } + } + } + + return nil +} + +// LoadActivities allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (assayL) LoadActivities(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error { + var slice []*Assay + var object *Assay + + if singular { + object = maybeAssay.(*Assay) + } else { + slice = *maybeAssay.(*[]*Assay) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &assayR{} + } + args = append(args, object.AssayID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &assayR{} + } + + for _, a := range args { + if a == obj.AssayID { + continue Outer + } + } + + args = append(args, obj.AssayID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`activities`), + qm.WhereIn(`activities.assay_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load activities") + } + + var resultSlice []*Activity + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice activities") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on activities") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activities") + } + + if len(activityAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + 
object.R.Activities = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.Assay = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.AssayID == foreign.AssayID { + local.R.Activities = append(local.R.Activities, foreign) + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.Assay = local + break + } + } + } + + return nil +} + +// LoadAssayClassMaps allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (assayL) LoadAssayClassMaps(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error { + var slice []*Assay + var object *Assay + + if singular { + object = maybeAssay.(*Assay) + } else { + slice = *maybeAssay.(*[]*Assay) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &assayR{} + } + args = append(args, object.AssayID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &assayR{} + } + + for _, a := range args { + if a == obj.AssayID { + continue Outer + } + } + + args = append(args, obj.AssayID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assay_class_map`), + qm.WhereIn(`assay_class_map.assay_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load assay_class_map") + } + + var resultSlice []*AssayClassMap + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice assay_class_map") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on assay_class_map") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred 
during iteration of eager loaded relations for assay_class_map") + } + + if len(assayClassMapAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.AssayClassMaps = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &assayClassMapR{} + } + foreign.R.Assay = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.AssayID == foreign.AssayID { + local.R.AssayClassMaps = append(local.R.AssayClassMaps, foreign) + if foreign.R == nil { + foreign.R = &assayClassMapR{} + } + foreign.R.Assay = local + break + } + } + } + + return nil +} + +// LoadAssayParameters allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (assayL) LoadAssayParameters(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAssay interface{}, mods queries.Applicator) error { + var slice []*Assay + var object *Assay + + if singular { + object = maybeAssay.(*Assay) + } else { + slice = *maybeAssay.(*[]*Assay) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &assayR{} + } + args = append(args, object.AssayID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &assayR{} + } + + for _, a := range args { + if a == obj.AssayID { + continue Outer + } + } + + args = append(args, obj.AssayID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assay_parameters`), + qm.WhereIn(`assay_parameters.assay_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load assay_parameters") + } + + var resultSlice []*AssayParameter + if err = queries.Bind(results, &resultSlice); err != nil { + return 
errors.Wrap(err, "failed to bind eager loaded slice assay_parameters") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on assay_parameters") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assay_parameters") + } + + if len(assayParameterAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.AssayParameters = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &assayParameterR{} + } + foreign.R.Assay = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.AssayID == foreign.AssayID { + local.R.AssayParameters = append(local.R.AssayParameters, foreign) + if foreign.R == nil { + foreign.R = &assayParameterR{} + } + foreign.R.Assay = local + break + } + } + } + + return nil +} + +// SetBaoFormatBioassayOntology of the assay to the related item. +// Sets o.R.BaoFormatBioassayOntology to related. +// Adds o to related.R.BaoFormatAssays. 
+func (o *Assay) SetBaoFormatBioassayOntology(ctx context.Context, exec boil.ContextExecutor, insert bool, related *BioassayOntology) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"bao_format"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{related.BaoID, o.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.BaoFormat, related.BaoID) + if o.R == nil { + o.R = &assayR{ + BaoFormatBioassayOntology: related, + } + } else { + o.R.BaoFormatBioassayOntology = related + } + + if related.R == nil { + related.R = &bioassayOntologyR{ + BaoFormatAssays: AssaySlice{o}, + } + } else { + related.R.BaoFormatAssays = append(related.R.BaoFormatAssays, o) + } + + return nil +} + +// RemoveBaoFormatBioassayOntology relationship. +// Sets o.R.BaoFormatBioassayOntology to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *Assay) RemoveBaoFormatBioassayOntology(ctx context.Context, exec boil.ContextExecutor, related *BioassayOntology) error { + var err error + + queries.SetScanner(&o.BaoFormat, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("bao_format")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.BaoFormatBioassayOntology = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.BaoFormatAssays { + if queries.Equal(o.BaoFormat, ri.BaoFormat) { + continue + } + + ln := len(related.R.BaoFormatAssays) + if ln > 1 && i < ln-1 { + related.R.BaoFormatAssays[i] = related.R.BaoFormatAssays[ln-1] + } + related.R.BaoFormatAssays = related.R.BaoFormatAssays[:ln-1] + break + } + return nil +} + +// SetVariant of the assay to the related item. +// Sets o.R.Variant to related. +// Adds o to related.R.VariantAssays. +func (o *Assay) SetVariant(ctx context.Context, exec boil.ContextExecutor, insert bool, related *VariantSequence) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"variant_id"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{related.VariantID, o.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.VariantID, related.VariantID) + if o.R == nil { + o.R = &assayR{ + Variant: related, + } + } else { + o.R.Variant = related + } + + if related.R == nil { + related.R = &variantSequenceR{ + VariantAssays: AssaySlice{o}, + } + } else { + 
related.R.VariantAssays = append(related.R.VariantAssays, o) + } + + return nil +} + +// RemoveVariant relationship. +// Sets o.R.Variant to nil. +// Removes o from all passed in related items' relationships struct. +func (o *Assay) RemoveVariant(ctx context.Context, exec boil.ContextExecutor, related *VariantSequence) error { + var err error + + queries.SetScanner(&o.VariantID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("variant_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.Variant = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.VariantAssays { + if queries.Equal(o.VariantID, ri.VariantID) { + continue + } + + ln := len(related.R.VariantAssays) + if ln > 1 && i < ln-1 { + related.R.VariantAssays[i] = related.R.VariantAssays[ln-1] + } + related.R.VariantAssays = related.R.VariantAssays[:ln-1] + break + } + return nil +} + +// SetTissue of the assay to the related item. +// Sets o.R.Tissue to related. +// Adds o to related.R.TissueAssays. 
+func (o *Assay) SetTissue(ctx context.Context, exec boil.ContextExecutor, insert bool, related *TissueDictionary) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"tissue_id"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{related.TissueID, o.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.TissueID, related.TissueID) + if o.R == nil { + o.R = &assayR{ + Tissue: related, + } + } else { + o.R.Tissue = related + } + + if related.R == nil { + related.R = &tissueDictionaryR{ + TissueAssays: AssaySlice{o}, + } + } else { + related.R.TissueAssays = append(related.R.TissueAssays, o) + } + + return nil +} + +// RemoveTissue relationship. +// Sets o.R.Tissue to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *Assay) RemoveTissue(ctx context.Context, exec boil.ContextExecutor, related *TissueDictionary) error { + var err error + + queries.SetScanner(&o.TissueID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("tissue_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.Tissue = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.TissueAssays { + if queries.Equal(o.TissueID, ri.TissueID) { + continue + } + + ln := len(related.R.TissueAssays) + if ln > 1 && i < ln-1 { + related.R.TissueAssays[i] = related.R.TissueAssays[ln-1] + } + related.R.TissueAssays = related.R.TissueAssays[:ln-1] + break + } + return nil +} + +// SetTidTargetDictionary of the assay to the related item. +// Sets o.R.TidTargetDictionary to related. +// Adds o to related.R.TidAssays. +func (o *Assay) SetTidTargetDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *TargetDictionary) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"tid"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{related.Tid, o.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.Tid, related.Tid) + if o.R == nil { + o.R = &assayR{ + TidTargetDictionary: related, + } + } else { + o.R.TidTargetDictionary = related + } + + if related.R == nil { + related.R = &targetDictionaryR{ + TidAssays: AssaySlice{o}, + } + } else { + related.R.TidAssays = 
append(related.R.TidAssays, o) + } + + return nil +} + +// RemoveTidTargetDictionary relationship. +// Sets o.R.TidTargetDictionary to nil. +// Removes o from all passed in related items' relationships struct. +func (o *Assay) RemoveTidTargetDictionary(ctx context.Context, exec boil.ContextExecutor, related *TargetDictionary) error { + var err error + + queries.SetScanner(&o.Tid, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("tid")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.TidTargetDictionary = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.TidAssays { + if queries.Equal(o.Tid, ri.Tid) { + continue + } + + ln := len(related.R.TidAssays) + if ln > 1 && i < ln-1 { + related.R.TidAssays[i] = related.R.TidAssays[ln-1] + } + related.R.TidAssays = related.R.TidAssays[:ln-1] + break + } + return nil +} + +// SetSRC of the assay to the related item. +// Sets o.R.SRC to related. +// Adds o to related.R.SRCAssays. 
+func (o *Assay) SetSRC(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Source) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"src_id"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{related.SRCID, o.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.SRCID = related.SRCID + if o.R == nil { + o.R = &assayR{ + SRC: related, + } + } else { + o.R.SRC = related + } + + if related.R == nil { + related.R = &sourceR{ + SRCAssays: AssaySlice{o}, + } + } else { + related.R.SRCAssays = append(related.R.SRCAssays, o) + } + + return nil +} + +// SetAssayRelationshipType of the assay to the related item. +// Sets o.R.AssayRelationshipType to related. +// Adds o to related.R.Assays. 
+func (o *Assay) SetAssayRelationshipType(ctx context.Context, exec boil.ContextExecutor, insert bool, related *RelationshipType) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"relationship_type"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{related.RelationshipType, o.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.RelationshipType, related.RelationshipType) + if o.R == nil { + o.R = &assayR{ + AssayRelationshipType: related, + } + } else { + o.R.AssayRelationshipType = related + } + + if related.R == nil { + related.R = &relationshipTypeR{ + Assays: AssaySlice{o}, + } + } else { + related.R.Assays = append(related.R.Assays, o) + } + + return nil +} + +// RemoveAssayRelationshipType relationship. +// Sets o.R.AssayRelationshipType to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *Assay) RemoveAssayRelationshipType(ctx context.Context, exec boil.ContextExecutor, related *RelationshipType) error { + var err error + + queries.SetScanner(&o.RelationshipType, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("relationship_type")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.AssayRelationshipType = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.Assays { + if queries.Equal(o.RelationshipType, ri.RelationshipType) { + continue + } + + ln := len(related.R.Assays) + if ln > 1 && i < ln-1 { + related.R.Assays[i] = related.R.Assays[ln-1] + } + related.R.Assays = related.R.Assays[:ln-1] + break + } + return nil +} + +// SetDoc of the assay to the related item. +// Sets o.R.Doc to related. +// Adds o to related.R.Assays. +func (o *Assay) SetDoc(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Doc) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"doc_id"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{related.DocID, o.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.DocID = related.DocID + if o.R == nil { + o.R = &assayR{ + Doc: related, + } + } else { + o.R.Doc = related + } + + if related.R == nil { + related.R = &docR{ + Assays: AssaySlice{o}, + } + } else { + related.R.Assays = append(related.R.Assays, o) + } + + return nil +} + +// SetCuratedByCurationLookup of the assay to the 
related item. +// Sets o.R.CuratedByCurationLookup to related. +// Adds o to related.R.CuratedByAssays. +func (o *Assay) SetCuratedByCurationLookup(ctx context.Context, exec boil.ContextExecutor, insert bool, related *CurationLookup) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"curated_by"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{related.CuratedBy, o.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.CuratedBy, related.CuratedBy) + if o.R == nil { + o.R = &assayR{ + CuratedByCurationLookup: related, + } + } else { + o.R.CuratedByCurationLookup = related + } + + if related.R == nil { + related.R = &curationLookupR{ + CuratedByAssays: AssaySlice{o}, + } + } else { + related.R.CuratedByAssays = append(related.R.CuratedByAssays, o) + } + + return nil +} + +// RemoveCuratedByCurationLookup relationship. +// Sets o.R.CuratedByCurationLookup to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *Assay) RemoveCuratedByCurationLookup(ctx context.Context, exec boil.ContextExecutor, related *CurationLookup) error { + var err error + + queries.SetScanner(&o.CuratedBy, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("curated_by")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.CuratedByCurationLookup = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.CuratedByAssays { + if queries.Equal(o.CuratedBy, ri.CuratedBy) { + continue + } + + ln := len(related.R.CuratedByAssays) + if ln > 1 && i < ln-1 { + related.R.CuratedByAssays[i] = related.R.CuratedByAssays[ln-1] + } + related.R.CuratedByAssays = related.R.CuratedByAssays[:ln-1] + break + } + return nil +} + +// SetConfidenceScoreConfidenceScoreLookup of the assay to the related item. +// Sets o.R.ConfidenceScoreConfidenceScoreLookup to related. +// Adds o to related.R.ConfidenceScoreAssays. +func (o *Assay) SetConfidenceScoreConfidenceScoreLookup(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ConfidenceScoreLookup) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"confidence_score"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{related.ConfidenceScore, o.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.ConfidenceScore, related.ConfidenceScore) + if o.R == nil { + o.R = &assayR{ + ConfidenceScoreConfidenceScoreLookup: related, + } + 
} else { + o.R.ConfidenceScoreConfidenceScoreLookup = related + } + + if related.R == nil { + related.R = &confidenceScoreLookupR{ + ConfidenceScoreAssays: AssaySlice{o}, + } + } else { + related.R.ConfidenceScoreAssays = append(related.R.ConfidenceScoreAssays, o) + } + + return nil +} + +// RemoveConfidenceScoreConfidenceScoreLookup relationship. +// Sets o.R.ConfidenceScoreConfidenceScoreLookup to nil. +// Removes o from all passed in related items' relationships struct. +func (o *Assay) RemoveConfidenceScoreConfidenceScoreLookup(ctx context.Context, exec boil.ContextExecutor, related *ConfidenceScoreLookup) error { + var err error + + queries.SetScanner(&o.ConfidenceScore, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("confidence_score")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.ConfidenceScoreConfidenceScoreLookup = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.ConfidenceScoreAssays { + if queries.Equal(o.ConfidenceScore, ri.ConfidenceScore) { + continue + } + + ln := len(related.R.ConfidenceScoreAssays) + if ln > 1 && i < ln-1 { + related.R.ConfidenceScoreAssays[i] = related.R.ConfidenceScoreAssays[ln-1] + } + related.R.ConfidenceScoreAssays = related.R.ConfidenceScoreAssays[:ln-1] + break + } + return nil +} + +// SetChembl of the assay to the related item. +// Sets o.R.Chembl to related. +// Adds o to related.R.ChemblAssay. 
func (o *Assay) SetChembl(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ChemblIDLookup) error {
	var err error
	// Optionally persist the related row first so its key exists for the FK.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Point this assay's chembl_id at the related lookup row.
	updateQuery := fmt.Sprintf(
		"UPDATE \"assays\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"chembl_id"}),
		strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns),
	)
	values := []interface{}{related.ChemblID, o.AssayID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the DB change into the struct field and both relationship caches.
	o.ChemblID = related.ChemblID
	if o.R == nil {
		o.R = &assayR{
			Chembl: related,
		}
	} else {
		o.R.Chembl = related
	}

	// One-to-one: the related side holds a single assay, not a slice.
	if related.R == nil {
		related.R = &chemblIDLookupR{
			ChemblAssay: o,
		}
	} else {
		related.R.ChemblAssay = o
	}

	return nil
}

// SetCell of the assay to the related item.
// Sets o.R.Cell to related.
// Adds o to related.R.CellAssays.
func (o *Assay) SetCell(ctx context.Context, exec boil.ContextExecutor, insert bool, related *CellDictionary) error {
	var err error
	// Optionally persist the related row first so its key exists for the FK.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Point this assay's cell_id at the related dictionary row.
	updateQuery := fmt.Sprintf(
		"UPDATE \"assays\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"cell_id"}),
		strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns),
	)
	values := []interface{}{related.CellID, o.AssayID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// queries.Assign handles the nullable FK column.
	queries.Assign(&o.CellID, related.CellID)
	if o.R == nil {
		o.R = &assayR{
			Cell: related,
		}
	} else {
		o.R.Cell = related
	}

	// One-to-many: append o to the related side's cached slice.
	if related.R == nil {
		related.R = &cellDictionaryR{
			CellAssays: AssaySlice{o},
		}
	} else {
		related.R.CellAssays = append(related.R.CellAssays, o)
	}

	return nil
}

// RemoveCell relationship.
// Sets o.R.Cell to nil.
// Removes o from all passed in related items' relationships struct.
+func (o *Assay) RemoveCell(ctx context.Context, exec boil.ContextExecutor, related *CellDictionary) error { + var err error + + queries.SetScanner(&o.CellID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("cell_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.Cell = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.CellAssays { + if queries.Equal(o.CellID, ri.CellID) { + continue + } + + ln := len(related.R.CellAssays) + if ln > 1 && i < ln-1 { + related.R.CellAssays[i] = related.R.CellAssays[ln-1] + } + related.R.CellAssays = related.R.CellAssays[:ln-1] + break + } + return nil +} + +// SetAssayAssayType of the assay to the related item. +// Sets o.R.AssayAssayType to related. +// Adds o to related.R.Assays. +func (o *Assay) SetAssayAssayType(ctx context.Context, exec boil.ContextExecutor, insert bool, related *AssayType) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"assay_type"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{related.AssayType, o.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.AssayType, related.AssayType) + if o.R == nil { + o.R = &assayR{ + AssayAssayType: related, + } + } else { + o.R.AssayAssayType = related + } + + if related.R == nil { + related.R = &assayTypeR{ + Assays: AssaySlice{o}, + } + } else { + related.R.Assays = append(related.R.Assays, o) + } + + return nil +} + +// 
RemoveAssayAssayType relationship. +// Sets o.R.AssayAssayType to nil. +// Removes o from all passed in related items' relationships struct. +func (o *Assay) RemoveAssayAssayType(ctx context.Context, exec boil.ContextExecutor, related *AssayType) error { + var err error + + queries.SetScanner(&o.AssayType, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("assay_type")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.AssayAssayType = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.Assays { + if queries.Equal(o.AssayType, ri.AssayType) { + continue + } + + ln := len(related.R.Assays) + if ln > 1 && i < ln-1 { + related.R.Assays[i] = related.R.Assays[ln-1] + } + related.R.Assays = related.R.Assays[:ln-1] + break + } + return nil +} + +// AddActivities adds the given related objects to the existing relationships +// of the assay, optionally inserting them as new records. +// Appends related to o.R.Activities. +// Sets related.R.Assay appropriately. 
func (o *Assay) AddActivities(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Activity) error {
	var err error
	for _, rel := range related {
		if insert {
			// New row: stamp the FK before inserting.
			rel.AssayID = o.AssayID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: repoint its assay_id at this assay.
			updateQuery := fmt.Sprintf(
				"UPDATE \"activities\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"assay_id"}),
				strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns),
			)
			values := []interface{}{o.AssayID, rel.ActivityID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.AssayID = o.AssayID
		}
	}

	// Cache the relationship on this side.
	if o.R == nil {
		o.R = &assayR{
			Activities: related,
		}
	} else {
		o.R.Activities = append(o.R.Activities, related...)
	}

	// And the back-reference on each related row.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &activityR{
				Assay: o,
			}
		} else {
			rel.R.Assay = o
		}
	}
	return nil
}

// AddAssayClassMaps adds the given related objects to the existing relationships
// of the assay, optionally inserting them as new records.
// Appends related to o.R.AssayClassMaps.
// Sets related.R.Assay appropriately.
func (o *Assay) AddAssayClassMaps(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*AssayClassMap) error {
	var err error
	for _, rel := range related {
		if insert {
			// New row: stamp the FK before inserting.
			rel.AssayID = o.AssayID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: repoint its assay_id at this assay.
			updateQuery := fmt.Sprintf(
				"UPDATE \"assay_class_map\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"assay_id"}),
				strmangle.WhereClause("\"", "\"", 0, assayClassMapPrimaryKeyColumns),
			)
			values := []interface{}{o.AssayID, rel.AssCLSMapID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.AssayID = o.AssayID
		}
	}

	// Cache the relationship on this side.
	if o.R == nil {
		o.R = &assayR{
			AssayClassMaps: related,
		}
	} else {
		o.R.AssayClassMaps = append(o.R.AssayClassMaps, related...)
	}

	// And the back-reference on each related row.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &assayClassMapR{
				Assay: o,
			}
		} else {
			rel.R.Assay = o
		}
	}
	return nil
}

// AddAssayParameters adds the given related objects to the existing relationships
// of the assay, optionally inserting them as new records.
// Appends related to o.R.AssayParameters.
// Sets related.R.Assay appropriately.
func (o *Assay) AddAssayParameters(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*AssayParameter) error {
	var err error
	for _, rel := range related {
		if insert {
			// New row: stamp the FK before inserting.
			rel.AssayID = o.AssayID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: repoint its assay_id at this assay.
			updateQuery := fmt.Sprintf(
				"UPDATE \"assay_parameters\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"assay_id"}),
				strmangle.WhereClause("\"", "\"", 0, assayParameterPrimaryKeyColumns),
			)
			values := []interface{}{o.AssayID, rel.AssayParamID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.AssayID = o.AssayID
		}
	}

	// Cache the relationship on this side.
	if o.R == nil {
		o.R = &assayR{
			AssayParameters: related,
		}
	} else {
		o.R.AssayParameters = append(o.R.AssayParameters, related...)
	}

	// And the back-reference on each related row.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &assayParameterR{
				Assay: o,
			}
		} else {
			rel.R.Assay = o
		}
	}
	return nil
}

// Assays retrieves all the records using an executor.
func Assays(mods ...qm.QueryMod) assayQuery {
	mods = append(mods, qm.From("\"assays\""))
	q := NewQuery(mods...)
	// Default to selecting every column of the assays table.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"assays\".*"})
	}

	return assayQuery{q}
}

// FindAssay retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindAssay(ctx context.Context, exec boil.ContextExecutor, assayID int64, selectCols ...string) (*Assay, error) {
	assayObj := &Assay{}

	sel := "*"
	if len(selectCols) > 0 {
		// Quote the requested columns with the dialect's quote runes.
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"assays\" where \"assay_id\"=?", sel,
	)

	q := queries.Raw(query, assayID)

	err := q.Bind(ctx, exec, assayObj)
	if err != nil {
		// Preserve sql.ErrNoRows unwrapped so callers can test for it.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from assays")
	}

	if err = assayObj.doAfterSelectHooks(ctx, exec); err != nil {
		return assayObj, err
	}

	return assayObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *Assay) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no assays provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	// Columns with a DB default that hold non-zero values are part of the
	// cache key: they change which columns get inserted vs. returned.
	nzDefaults := queries.NonZeroDefaultSet(assayColumnsWithDefault, o)

	key := makeCacheKey(columns, nzDefaults)
	assayInsertCacheMut.RLock()
	cache, cached := assayInsertCache[key]
	assayInsertCacheMut.RUnlock()

	if !cached {
		// Build the insert/return column lists and struct-field mappings once
		// per unique column set, then cache the result.
		wl, returnColumns := columns.InsertColumnSet(
			assayAllColumns,
			assayColumnsWithDefault,
			assayColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(assayType, assayMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(assayType, assayMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"assays\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"assays\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		// Splice the (possibly empty) OUTPUT/RETURNING fragments into the query.
		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	// Use QueryRow when DB-generated values must be scanned back into o.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into assays")
	}

	if !cached {
		assayInsertCacheMut.Lock()
		assayInsertCache[key] = cache
		assayInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the Assay.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *Assay) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	assayUpdateCacheMut.RLock()
	cache, cached := assayUpdateCache[key]
	assayUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			assayAllColumns,
			assayPrimaryKeyColumns,
		)

		// created_at is only updatable via an explicit whitelist.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update assays, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"assays\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns),
		)
		// Bind SET columns first, then the primary key for the WHERE clause.
		cache.valueMapping, err = queries.BindMapping(assayType, assayMapping, append(wl, assayPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update assays row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for assays")
	}

	if !cached {
		assayUpdateCacheMut.Lock()
		assayUpdateCache[key] = cache
		assayUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q assayQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for assays")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for assays")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o AssaySlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	// Flatten the cols map into parallel name/value slices for the SET clause.
	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"assays\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in assay slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all assay")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *Assay) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no assays provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(assayColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	assayUpsertCacheMut.RLock()
	cache, cached := assayUpsertCache[key]
	assayUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		// Compute the insert/return and update column sets once per key.
		insert, ret := insertColumns.InsertColumnSet(
			assayAllColumns,
			assayColumnsWithDefault,
			assayColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			assayAllColumns,
			assayPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert assays, could not build update column list")
		}

		// Default the conflict target to the primary key.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(assayPrimaryKeyColumns))
			copy(conflict, assayPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"assays\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(assayType, assayMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(assayType, assayMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert assays")
	}

	if !cached {
		assayUpsertCacheMut.Lock()
		assayUpsertCache[key] = cache
		assayUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single Assay record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *Assay) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no Assay provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), assayPrimaryKeyMapping)
	sql := "DELETE FROM \"assays\" WHERE \"assay_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from assays")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for assays")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q assayQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no assayQuery provided for delete all")
	}

	// Re-shape the existing query into a DELETE before executing.
	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from assays")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for assays")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o AssaySlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Only walk the hook loops when hooks are actually registered.
	if len(assayBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"assays\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from assay slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for assays")
	}

	if len(assayAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *Assay) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindAssay(ctx, exec, o.AssayID)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place with the freshly fetched row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
+func (o *AssaySlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := AssaySlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), assayPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"assays\".* FROM \"assays\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, assayPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in AssaySlice") + } + + *o = slice + + return nil +} + +// AssayExists checks if the Assay row exists. +func AssayExists(ctx context.Context, exec boil.ContextExecutor, assayID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"assays\" where \"assay_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, assayID) + } + row := exec.QueryRowContext(ctx, sql, assayID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if assays exists") + } + + return exists, nil +} diff --git a/models/atc_classification.go b/models/atc_classification.go new file mode 100644 index 0000000..06d05db --- /dev/null +++ b/models/atc_classification.go @@ -0,0 +1,1297 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// AtcClassification is an object representing the database table.
// Level5 is the full ATC code and the table's primary key; level1-level4 hold
// the progressively shorter prefixes — presumably WHO ATC hierarchy levels;
// TODO confirm against the schema.
type AtcClassification struct {
	WhoName           null.String `boil:"who_name" json:"who_name,omitempty" toml:"who_name" yaml:"who_name,omitempty"`
	Level1            null.String `boil:"level1" json:"level1,omitempty" toml:"level1" yaml:"level1,omitempty"`
	Level2            null.String `boil:"level2" json:"level2,omitempty" toml:"level2" yaml:"level2,omitempty"`
	Level3            null.String `boil:"level3" json:"level3,omitempty" toml:"level3" yaml:"level3,omitempty"`
	Level4            null.String `boil:"level4" json:"level4,omitempty" toml:"level4" yaml:"level4,omitempty"`
	Level5            string      `boil:"level5" json:"level5" toml:"level5" yaml:"level5"`
	Level1Description null.String `boil:"level1_description" json:"level1_description,omitempty" toml:"level1_description" yaml:"level1_description,omitempty"`
	Level2Description null.String `boil:"level2_description" json:"level2_description,omitempty" toml:"level2_description" yaml:"level2_description,omitempty"`
	Level3Description null.String `boil:"level3_description" json:"level3_description,omitempty" toml:"level3_description" yaml:"level3_description,omitempty"`
	Level4Description null.String `boil:"level4_description" json:"level4_description,omitempty" toml:"level4_description" yaml:"level4_description,omitempty"`

	// R caches eager-loaded relationships; L holds the Load methods.
	R *atcClassificationR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L atcClassificationL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// AtcClassificationColumns maps struct fields to bare column names.
var AtcClassificationColumns = struct {
	WhoName           string
	Level1            string
	Level2            string
	Level3            string
	Level4            string
	Level5            string
	Level1Description string
	Level2Description string
	Level3Description string
	Level4Description string
}{
	WhoName:           "who_name",
	Level1:            "level1",
	Level2:            "level2",
	Level3:            "level3",
	Level4:            "level4",
	Level5:            "level5",
	Level1Description: "level1_description",
	Level2Description: "level2_description",
	Level3Description: "level3_description",
	Level4Description: "level4_description",
}

// AtcClassificationTableColumns maps struct fields to table-qualified column names.
var AtcClassificationTableColumns = struct {
	WhoName           string
	Level1            string
	Level2            string
	Level3            string
	Level4            string
	Level5            string
	Level1Description string
	Level2Description string
	Level3Description string
	Level4Description string
}{
	WhoName:           "atc_classification.who_name",
	Level1:            "atc_classification.level1",
	Level2:            "atc_classification.level2",
	Level3:            "atc_classification.level3",
	Level4:            "atc_classification.level4",
	Level5:            "atc_classification.level5",
	Level1Description: "atc_classification.level1_description",
	Level2Description: "atc_classification.level2_description",
	Level3Description: "atc_classification.level3_description",
	Level4Description: "atc_classification.level4_description",
}

// Generated where

// AtcClassificationWhere provides typed where-clause helpers per column.
var AtcClassificationWhere = struct {
	WhoName           whereHelpernull_String
	Level1            whereHelpernull_String
	Level2            whereHelpernull_String
	Level3            whereHelpernull_String
	Level4            whereHelpernull_String
	Level5            whereHelperstring
	Level1Description whereHelpernull_String
	Level2Description whereHelpernull_String
	Level3Description whereHelpernull_String
	Level4Description whereHelpernull_String
}{
	WhoName:           whereHelpernull_String{field: "\"atc_classification\".\"who_name\""},
	Level1:            whereHelpernull_String{field: "\"atc_classification\".\"level1\""},
	Level2:            whereHelpernull_String{field: "\"atc_classification\".\"level2\""},
	Level3:            whereHelpernull_String{field: "\"atc_classification\".\"level3\""},
	Level4:            whereHelpernull_String{field: "\"atc_classification\".\"level4\""},
	Level5:            whereHelperstring{field: "\"atc_classification\".\"level5\""},
	Level1Description: whereHelpernull_String{field: "\"atc_classification\".\"level1_description\""},
	Level2Description: whereHelpernull_String{field: "\"atc_classification\".\"level2_description\""},
	Level3Description: whereHelpernull_String{field: "\"atc_classification\".\"level3_description\""},
	Level4Description: whereHelpernull_String{field: "\"atc_classification\".\"level4_description\""},
}

// AtcClassificationRels is where relationship names are stored.
var AtcClassificationRels = struct {
	AtcCodeDefinedDailyDoses         string
	Level5MoleculeAtcClassifications string
}{
	AtcCodeDefinedDailyDoses:         "AtcCodeDefinedDailyDoses",
	Level5MoleculeAtcClassifications: "Level5MoleculeAtcClassifications",
}

// atcClassificationR is where relationships are stored.
type atcClassificationR struct {
	AtcCodeDefinedDailyDoses         DefinedDailyDoseSlice          `boil:"AtcCodeDefinedDailyDoses" json:"AtcCodeDefinedDailyDoses" toml:"AtcCodeDefinedDailyDoses" yaml:"AtcCodeDefinedDailyDoses"`
	Level5MoleculeAtcClassifications MoleculeAtcClassificationSlice `boil:"Level5MoleculeAtcClassifications" json:"Level5MoleculeAtcClassifications" toml:"Level5MoleculeAtcClassifications" yaml:"Level5MoleculeAtcClassifications"`
}

// NewStruct creates a new relationship struct
func (*atcClassificationR) NewStruct() *atcClassificationR {
	return &atcClassificationR{}
}

// GetAtcCodeDefinedDailyDoses returns the cached relationship, nil-safe on the receiver.
func (r *atcClassificationR) GetAtcCodeDefinedDailyDoses() DefinedDailyDoseSlice {
	if r == nil {
		return nil
	}
	return r.AtcCodeDefinedDailyDoses
}

// GetLevel5MoleculeAtcClassifications returns the cached relationship, nil-safe on the receiver.
func (r *atcClassificationR) GetLevel5MoleculeAtcClassifications() MoleculeAtcClassificationSlice {
	if r == nil {
		return nil
	}
	return r.Level5MoleculeAtcClassifications
}

// atcClassificationL is where Load methods for each relationship are stored.
type atcClassificationL struct{}

var (
	atcClassificationAllColumns            = []string{"who_name", "level1", "level2", "level3", "level4", "level5", "level1_description", "level2_description", "level3_description", "level4_description"}
	atcClassificationColumnsWithoutDefault = []string{"level5"}
	atcClassificationColumnsWithDefault    = []string{"who_name", "level1", "level2", "level3", "level4", "level1_description", "level2_description", "level3_description", "level4_description"}
	atcClassificationPrimaryKeyColumns     = []string{"level5"}
	atcClassificationGeneratedColumns      = []string{}
)

type (
	// AtcClassificationSlice is an alias for a slice of pointers to AtcClassification.
	// This should almost always be used instead of []AtcClassification.
	AtcClassificationSlice []*AtcClassification
	// AtcClassificationHook is the signature for custom AtcClassification hook methods
	AtcClassificationHook func(context.Context, boil.ContextExecutor, *AtcClassification) error

	atcClassificationQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	atcClassificationType                 = reflect.TypeOf(&AtcClassification{})
	atcClassificationMapping              = queries.MakeStructMapping(atcClassificationType)
	atcClassificationPrimaryKeyMapping, _ = queries.BindMapping(atcClassificationType, atcClassificationMapping, atcClassificationPrimaryKeyColumns)
	atcClassificationInsertCacheMut       sync.RWMutex
	atcClassificationInsertCache          = make(map[string]insertCache)
	atcClassificationUpdateCacheMut       sync.RWMutex
	atcClassificationUpdateCache          = make(map[string]updateCache)
	atcClassificationUpsertCacheMut       sync.RWMutex
	atcClassificationUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Registered hook slices, one per lifecycle event.
var atcClassificationAfterSelectHooks []AtcClassificationHook

var atcClassificationBeforeInsertHooks []AtcClassificationHook
var atcClassificationAfterInsertHooks []AtcClassificationHook

var atcClassificationBeforeUpdateHooks []AtcClassificationHook
var atcClassificationAfterUpdateHooks []AtcClassificationHook

var atcClassificationBeforeDeleteHooks []AtcClassificationHook
var atcClassificationAfterDeleteHooks []AtcClassificationHook

var atcClassificationBeforeUpsertHooks []AtcClassificationHook
var atcClassificationAfterUpsertHooks []AtcClassificationHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *AtcClassification) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range atcClassificationAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *AtcClassification) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range atcClassificationBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *AtcClassification) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range atcClassificationAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
+func (o *AtcClassification) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range atcClassificationBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *AtcClassification) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range atcClassificationAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *AtcClassification) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range atcClassificationBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *AtcClassification) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range atcClassificationAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *AtcClassification) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range atcClassificationBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *AtcClassification) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range atcClassificationAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddAtcClassificationHook registers your hook function for all future operations. +func AddAtcClassificationHook(hookPoint boil.HookPoint, atcClassificationHook AtcClassificationHook) { + switch hookPoint { + case boil.AfterSelectHook: + atcClassificationAfterSelectHooks = append(atcClassificationAfterSelectHooks, atcClassificationHook) + case boil.BeforeInsertHook: + atcClassificationBeforeInsertHooks = append(atcClassificationBeforeInsertHooks, atcClassificationHook) + case boil.AfterInsertHook: + atcClassificationAfterInsertHooks = append(atcClassificationAfterInsertHooks, atcClassificationHook) + case boil.BeforeUpdateHook: + atcClassificationBeforeUpdateHooks = append(atcClassificationBeforeUpdateHooks, atcClassificationHook) + case boil.AfterUpdateHook: + atcClassificationAfterUpdateHooks = append(atcClassificationAfterUpdateHooks, atcClassificationHook) + case boil.BeforeDeleteHook: + atcClassificationBeforeDeleteHooks = append(atcClassificationBeforeDeleteHooks, atcClassificationHook) + case boil.AfterDeleteHook: + atcClassificationAfterDeleteHooks = append(atcClassificationAfterDeleteHooks, atcClassificationHook) + case boil.BeforeUpsertHook: + atcClassificationBeforeUpsertHooks = append(atcClassificationBeforeUpsertHooks, atcClassificationHook) + case boil.AfterUpsertHook: + atcClassificationAfterUpsertHooks = append(atcClassificationAfterUpsertHooks, atcClassificationHook) + } +} + +// One returns a single atcClassification record from the query. 
+func (q atcClassificationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*AtcClassification, error) { + o := &AtcClassification{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for atc_classification") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all AtcClassification records from the query. +func (q atcClassificationQuery) All(ctx context.Context, exec boil.ContextExecutor) (AtcClassificationSlice, error) { + var o []*AtcClassification + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to AtcClassification slice") + } + + if len(atcClassificationAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all AtcClassification records in the query. +func (q atcClassificationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count atc_classification rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. 
+func (q atcClassificationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if atc_classification exists") + } + + return count > 0, nil +} + +// AtcCodeDefinedDailyDoses retrieves all the defined_daily_dose's DefinedDailyDoses with an executor via atc_code column. +func (o *AtcClassification) AtcCodeDefinedDailyDoses(mods ...qm.QueryMod) definedDailyDoseQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"defined_daily_dose\".\"atc_code\"=?", o.Level5), + ) + + return DefinedDailyDoses(queryMods...) +} + +// Level5MoleculeAtcClassifications retrieves all the molecule_atc_classification's MoleculeAtcClassifications with an executor via level5 column. +func (o *AtcClassification) Level5MoleculeAtcClassifications(mods ...qm.QueryMod) moleculeAtcClassificationQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"molecule_atc_classification\".\"level5\"=?", o.Level5), + ) + + return MoleculeAtcClassifications(queryMods...) +} + +// LoadAtcCodeDefinedDailyDoses allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (atcClassificationL) LoadAtcCodeDefinedDailyDoses(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAtcClassification interface{}, mods queries.Applicator) error { + var slice []*AtcClassification + var object *AtcClassification + + if singular { + object = maybeAtcClassification.(*AtcClassification) + } else { + slice = *maybeAtcClassification.(*[]*AtcClassification) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &atcClassificationR{} + } + args = append(args, object.Level5) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &atcClassificationR{} + } + + for _, a := range args { + if a == obj.Level5 { + continue Outer + } + } + + args = append(args, obj.Level5) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`defined_daily_dose`), + qm.WhereIn(`defined_daily_dose.atc_code in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load defined_daily_dose") + } + + var resultSlice []*DefinedDailyDose + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice defined_daily_dose") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on defined_daily_dose") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for defined_daily_dose") + } + + if len(definedDailyDoseAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.AtcCodeDefinedDailyDoses = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &definedDailyDoseR{} + } + foreign.R.AtcCodeAtcClassification = object + } + return nil + } 
+ + for _, foreign := range resultSlice { + for _, local := range slice { + if local.Level5 == foreign.AtcCode { + local.R.AtcCodeDefinedDailyDoses = append(local.R.AtcCodeDefinedDailyDoses, foreign) + if foreign.R == nil { + foreign.R = &definedDailyDoseR{} + } + foreign.R.AtcCodeAtcClassification = local + break + } + } + } + + return nil +} + +// LoadLevel5MoleculeAtcClassifications allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (atcClassificationL) LoadLevel5MoleculeAtcClassifications(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAtcClassification interface{}, mods queries.Applicator) error { + var slice []*AtcClassification + var object *AtcClassification + + if singular { + object = maybeAtcClassification.(*AtcClassification) + } else { + slice = *maybeAtcClassification.(*[]*AtcClassification) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &atcClassificationR{} + } + args = append(args, object.Level5) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &atcClassificationR{} + } + + for _, a := range args { + if a == obj.Level5 { + continue Outer + } + } + + args = append(args, obj.Level5) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`molecule_atc_classification`), + qm.WhereIn(`molecule_atc_classification.level5 in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load molecule_atc_classification") + } + + var resultSlice []*MoleculeAtcClassification + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice molecule_atc_classification") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on 
molecule_atc_classification") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_atc_classification") + } + + if len(moleculeAtcClassificationAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.Level5MoleculeAtcClassifications = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &moleculeAtcClassificationR{} + } + foreign.R.Level5AtcClassification = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.Level5 == foreign.Level5 { + local.R.Level5MoleculeAtcClassifications = append(local.R.Level5MoleculeAtcClassifications, foreign) + if foreign.R == nil { + foreign.R = &moleculeAtcClassificationR{} + } + foreign.R.Level5AtcClassification = local + break + } + } + } + + return nil +} + +// AddAtcCodeDefinedDailyDoses adds the given related objects to the existing relationships +// of the atc_classification, optionally inserting them as new records. +// Appends related to o.R.AtcCodeDefinedDailyDoses. +// Sets related.R.AtcCodeAtcClassification appropriately. 
+func (o *AtcClassification) AddAtcCodeDefinedDailyDoses(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DefinedDailyDose) error { + var err error + for _, rel := range related { + if insert { + rel.AtcCode = o.Level5 + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"defined_daily_dose\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"atc_code"}), + strmangle.WhereClause("\"", "\"", 0, definedDailyDosePrimaryKeyColumns), + ) + values := []interface{}{o.Level5, rel.DDDID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.AtcCode = o.Level5 + } + } + + if o.R == nil { + o.R = &atcClassificationR{ + AtcCodeDefinedDailyDoses: related, + } + } else { + o.R.AtcCodeDefinedDailyDoses = append(o.R.AtcCodeDefinedDailyDoses, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &definedDailyDoseR{ + AtcCodeAtcClassification: o, + } + } else { + rel.R.AtcCodeAtcClassification = o + } + } + return nil +} + +// AddLevel5MoleculeAtcClassifications adds the given related objects to the existing relationships +// of the atc_classification, optionally inserting them as new records. +// Appends related to o.R.Level5MoleculeAtcClassifications. +// Sets related.R.Level5AtcClassification appropriately. 
+func (o *AtcClassification) AddLevel5MoleculeAtcClassifications(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeAtcClassification) error { + var err error + for _, rel := range related { + if insert { + rel.Level5 = o.Level5 + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_atc_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"level5"}), + strmangle.WhereClause("\"", "\"", 0, moleculeAtcClassificationPrimaryKeyColumns), + ) + values := []interface{}{o.Level5, rel.MolAtcID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.Level5 = o.Level5 + } + } + + if o.R == nil { + o.R = &atcClassificationR{ + Level5MoleculeAtcClassifications: related, + } + } else { + o.R.Level5MoleculeAtcClassifications = append(o.R.Level5MoleculeAtcClassifications, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &moleculeAtcClassificationR{ + Level5AtcClassification: o, + } + } else { + rel.R.Level5AtcClassification = o + } + } + return nil +} + +// AtcClassifications retrieves all the records using an executor. +func AtcClassifications(mods ...qm.QueryMod) atcClassificationQuery { + mods = append(mods, qm.From("\"atc_classification\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"atc_classification\".*"}) + } + + return atcClassificationQuery{q} +} + +// FindAtcClassification retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindAtcClassification(ctx context.Context, exec boil.ContextExecutor, level5 string, selectCols ...string) (*AtcClassification, error) { + atcClassificationObj := &AtcClassification{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"atc_classification\" where \"level5\"=?", sel, + ) + + q := queries.Raw(query, level5) + + err := q.Bind(ctx, exec, atcClassificationObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from atc_classification") + } + + if err = atcClassificationObj.doAfterSelectHooks(ctx, exec); err != nil { + return atcClassificationObj, err + } + + return atcClassificationObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *AtcClassification) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no atc_classification provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(atcClassificationColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + atcClassificationInsertCacheMut.RLock() + cache, cached := atcClassificationInsertCache[key] + atcClassificationInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + atcClassificationAllColumns, + atcClassificationColumnsWithDefault, + atcClassificationColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(atcClassificationType, atcClassificationMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(atcClassificationType, atcClassificationMapping, 
returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"atc_classification\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"atc_classification\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into atc_classification") + } + + if !cached { + atcClassificationInsertCacheMut.Lock() + atcClassificationInsertCache[key] = cache + atcClassificationInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the AtcClassification. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *AtcClassification) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + atcClassificationUpdateCacheMut.RLock() + cache, cached := atcClassificationUpdateCache[key] + atcClassificationUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + atcClassificationAllColumns, + atcClassificationPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update atc_classification, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"atc_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, atcClassificationPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(atcClassificationType, atcClassificationMapping, append(wl, atcClassificationPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update atc_classification row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for atc_classification") + } + + if !cached { + atcClassificationUpdateCacheMut.Lock() + atcClassificationUpdateCache[key] = cache + atcClassificationUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q atcClassificationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for atc_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for atc_classification") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o AtcClassificationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), atcClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"atc_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, atcClassificationPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in atcClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all atcClassification") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *AtcClassification) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no atc_classification provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(atcClassificationColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + atcClassificationUpsertCacheMut.RLock() + cache, cached := atcClassificationUpsertCache[key] + atcClassificationUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + atcClassificationAllColumns, + atcClassificationColumnsWithDefault, + atcClassificationColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + 
atcClassificationAllColumns, + atcClassificationPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert atc_classification, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(atcClassificationPrimaryKeyColumns)) + copy(conflict, atcClassificationPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"atc_classification\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(atcClassificationType, atcClassificationMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(atcClassificationType, atcClassificationMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert atc_classification") + } + + if !cached { + atcClassificationUpsertCacheMut.Lock() + atcClassificationUpsertCache[key] = cache + atcClassificationUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single AtcClassification record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *AtcClassification) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no AtcClassification provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), atcClassificationPrimaryKeyMapping) + sql := "DELETE FROM \"atc_classification\" WHERE \"level5\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from atc_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for atc_classification") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q atcClassificationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no atcClassificationQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from atc_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for atc_classification") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o AtcClassificationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(atcClassificationBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), atcClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"atc_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, atcClassificationPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from atcClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for atc_classification") + } + + if len(atcClassificationAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *AtcClassification) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindAtcClassification(ctx, exec, o.Level5) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *AtcClassificationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := AtcClassificationSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), atcClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"atc_classification\".* FROM \"atc_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, atcClassificationPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in AtcClassificationSlice") + } + + *o = slice + + return nil +} + +// AtcClassificationExists checks if the AtcClassification row exists. +func AtcClassificationExists(ctx context.Context, exec boil.ContextExecutor, level5 string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"atc_classification\" where \"level5\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, level5) + } + row := exec.QueryRowContext(ctx, sql, level5) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if atc_classification exists") + } + + return exists, nil +} diff --git a/models/binding_sites.go b/models/binding_sites.go new file mode 100644 index 0000000..06687e0 --- /dev/null +++ b/models/binding_sites.go @@ -0,0 +1,1780 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// BindingSite is an object representing the database table. +type BindingSite struct { + SiteID int64 `boil:"site_id" json:"site_id" toml:"site_id" yaml:"site_id"` + SiteName null.String `boil:"site_name" json:"site_name,omitempty" toml:"site_name" yaml:"site_name,omitempty"` + Tid null.Int64 `boil:"tid" json:"tid,omitempty" toml:"tid" yaml:"tid,omitempty"` + + R *bindingSiteR `boil:"-" json:"-" toml:"-" yaml:"-"` + L bindingSiteL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var BindingSiteColumns = struct { + SiteID string + SiteName string + Tid string +}{ + SiteID: "site_id", + SiteName: "site_name", + Tid: "tid", +} + +var BindingSiteTableColumns = struct { + SiteID string + SiteName string + Tid string +}{ + SiteID: "binding_sites.site_id", + SiteName: "binding_sites.site_name", + Tid: "binding_sites.tid", +} + +// Generated where + +var BindingSiteWhere = struct { + SiteID whereHelperint64 + SiteName whereHelpernull_String + Tid whereHelpernull_Int64 +}{ + SiteID: whereHelperint64{field: "\"binding_sites\".\"site_id\""}, + SiteName: whereHelpernull_String{field: "\"binding_sites\".\"site_name\""}, + Tid: whereHelpernull_Int64{field: "\"binding_sites\".\"tid\""}, +} + +// BindingSiteRels is where relationship names are stored. 
+var BindingSiteRels = struct { + TidTargetDictionary string + SiteDrugMechanisms string + SitePredictedBindingDomains string + SiteSiteComponents string +}{ + TidTargetDictionary: "TidTargetDictionary", + SiteDrugMechanisms: "SiteDrugMechanisms", + SitePredictedBindingDomains: "SitePredictedBindingDomains", + SiteSiteComponents: "SiteSiteComponents", +} + +// bindingSiteR is where relationships are stored. +type bindingSiteR struct { + TidTargetDictionary *TargetDictionary `boil:"TidTargetDictionary" json:"TidTargetDictionary" toml:"TidTargetDictionary" yaml:"TidTargetDictionary"` + SiteDrugMechanisms DrugMechanismSlice `boil:"SiteDrugMechanisms" json:"SiteDrugMechanisms" toml:"SiteDrugMechanisms" yaml:"SiteDrugMechanisms"` + SitePredictedBindingDomains PredictedBindingDomainSlice `boil:"SitePredictedBindingDomains" json:"SitePredictedBindingDomains" toml:"SitePredictedBindingDomains" yaml:"SitePredictedBindingDomains"` + SiteSiteComponents SiteComponentSlice `boil:"SiteSiteComponents" json:"SiteSiteComponents" toml:"SiteSiteComponents" yaml:"SiteSiteComponents"` +} + +// NewStruct creates a new relationship struct +func (*bindingSiteR) NewStruct() *bindingSiteR { + return &bindingSiteR{} +} + +func (r *bindingSiteR) GetTidTargetDictionary() *TargetDictionary { + if r == nil { + return nil + } + return r.TidTargetDictionary +} + +func (r *bindingSiteR) GetSiteDrugMechanisms() DrugMechanismSlice { + if r == nil { + return nil + } + return r.SiteDrugMechanisms +} + +func (r *bindingSiteR) GetSitePredictedBindingDomains() PredictedBindingDomainSlice { + if r == nil { + return nil + } + return r.SitePredictedBindingDomains +} + +func (r *bindingSiteR) GetSiteSiteComponents() SiteComponentSlice { + if r == nil { + return nil + } + return r.SiteSiteComponents +} + +// bindingSiteL is where Load methods for each relationship are stored. 
+type bindingSiteL struct{} + +var ( + bindingSiteAllColumns = []string{"site_id", "site_name", "tid"} + bindingSiteColumnsWithoutDefault = []string{"site_id"} + bindingSiteColumnsWithDefault = []string{"site_name", "tid"} + bindingSitePrimaryKeyColumns = []string{"site_id"} + bindingSiteGeneratedColumns = []string{} +) + +type ( + // BindingSiteSlice is an alias for a slice of pointers to BindingSite. + // This should almost always be used instead of []BindingSite. + BindingSiteSlice []*BindingSite + // BindingSiteHook is the signature for custom BindingSite hook methods + BindingSiteHook func(context.Context, boil.ContextExecutor, *BindingSite) error + + bindingSiteQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + bindingSiteType = reflect.TypeOf(&BindingSite{}) + bindingSiteMapping = queries.MakeStructMapping(bindingSiteType) + bindingSitePrimaryKeyMapping, _ = queries.BindMapping(bindingSiteType, bindingSiteMapping, bindingSitePrimaryKeyColumns) + bindingSiteInsertCacheMut sync.RWMutex + bindingSiteInsertCache = make(map[string]insertCache) + bindingSiteUpdateCacheMut sync.RWMutex + bindingSiteUpdateCache = make(map[string]updateCache) + bindingSiteUpsertCacheMut sync.RWMutex + bindingSiteUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var bindingSiteAfterSelectHooks []BindingSiteHook + +var bindingSiteBeforeInsertHooks []BindingSiteHook +var bindingSiteAfterInsertHooks []BindingSiteHook + +var bindingSiteBeforeUpdateHooks []BindingSiteHook +var bindingSiteAfterUpdateHooks []BindingSiteHook + +var bindingSiteBeforeDeleteHooks []BindingSiteHook +var bindingSiteAfterDeleteHooks []BindingSiteHook + +var bindingSiteBeforeUpsertHooks []BindingSiteHook +var bindingSiteAfterUpsertHooks []BindingSiteHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *BindingSite) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range bindingSiteAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *BindingSite) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range bindingSiteBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *BindingSite) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range bindingSiteAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *BindingSite) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range bindingSiteBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *BindingSite) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range bindingSiteAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *BindingSite) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range bindingSiteBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *BindingSite) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range bindingSiteAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *BindingSite) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range bindingSiteBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *BindingSite) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range bindingSiteAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddBindingSiteHook registers your hook function for all future operations. +func AddBindingSiteHook(hookPoint boil.HookPoint, bindingSiteHook BindingSiteHook) { + switch hookPoint { + case boil.AfterSelectHook: + bindingSiteAfterSelectHooks = append(bindingSiteAfterSelectHooks, bindingSiteHook) + case boil.BeforeInsertHook: + bindingSiteBeforeInsertHooks = append(bindingSiteBeforeInsertHooks, bindingSiteHook) + case boil.AfterInsertHook: + bindingSiteAfterInsertHooks = append(bindingSiteAfterInsertHooks, bindingSiteHook) + case boil.BeforeUpdateHook: + bindingSiteBeforeUpdateHooks = append(bindingSiteBeforeUpdateHooks, bindingSiteHook) + case boil.AfterUpdateHook: + bindingSiteAfterUpdateHooks = append(bindingSiteAfterUpdateHooks, bindingSiteHook) + case boil.BeforeDeleteHook: + bindingSiteBeforeDeleteHooks = append(bindingSiteBeforeDeleteHooks, bindingSiteHook) + case boil.AfterDeleteHook: + bindingSiteAfterDeleteHooks = append(bindingSiteAfterDeleteHooks, bindingSiteHook) + case boil.BeforeUpsertHook: + bindingSiteBeforeUpsertHooks = append(bindingSiteBeforeUpsertHooks, bindingSiteHook) + case boil.AfterUpsertHook: + bindingSiteAfterUpsertHooks = append(bindingSiteAfterUpsertHooks, bindingSiteHook) + } +} + +// One returns a single bindingSite record from the query. 
+func (q bindingSiteQuery) One(ctx context.Context, exec boil.ContextExecutor) (*BindingSite, error) { + o := &BindingSite{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for binding_sites") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all BindingSite records from the query. +func (q bindingSiteQuery) All(ctx context.Context, exec boil.ContextExecutor) (BindingSiteSlice, error) { + var o []*BindingSite + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to BindingSite slice") + } + + if len(bindingSiteAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all BindingSite records in the query. +func (q bindingSiteQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count binding_sites rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q bindingSiteQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if binding_sites exists") + } + + return count > 0, nil +} + +// TidTargetDictionary pointed to by the foreign key. 
+func (o *BindingSite) TidTargetDictionary(mods ...qm.QueryMod) targetDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"tid\" = ?", o.Tid), + } + + queryMods = append(queryMods, mods...) + + return TargetDictionaries(queryMods...) +} + +// SiteDrugMechanisms retrieves all the drug_mechanism's DrugMechanisms with an executor via site_id column. +func (o *BindingSite) SiteDrugMechanisms(mods ...qm.QueryMod) drugMechanismQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"drug_mechanism\".\"site_id\"=?", o.SiteID), + ) + + return DrugMechanisms(queryMods...) +} + +// SitePredictedBindingDomains retrieves all the predicted_binding_domain's PredictedBindingDomains with an executor via site_id column. +func (o *BindingSite) SitePredictedBindingDomains(mods ...qm.QueryMod) predictedBindingDomainQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"predicted_binding_domains\".\"site_id\"=?", o.SiteID), + ) + + return PredictedBindingDomains(queryMods...) +} + +// SiteSiteComponents retrieves all the site_component's SiteComponents with an executor via site_id column. +func (o *BindingSite) SiteSiteComponents(mods ...qm.QueryMod) siteComponentQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"site_components\".\"site_id\"=?", o.SiteID), + ) + + return SiteComponents(queryMods...) +} + +// LoadTidTargetDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (bindingSiteL) LoadTidTargetDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeBindingSite interface{}, mods queries.Applicator) error { + var slice []*BindingSite + var object *BindingSite + + if singular { + object = maybeBindingSite.(*BindingSite) + } else { + slice = *maybeBindingSite.(*[]*BindingSite) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &bindingSiteR{} + } + if !queries.IsNil(object.Tid) { + args = append(args, object.Tid) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &bindingSiteR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Tid) { + continue Outer + } + } + + if !queries.IsNil(obj.Tid) { + args = append(args, obj.Tid) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`target_dictionary`), + qm.WhereIn(`target_dictionary.tid in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load TargetDictionary") + } + + var resultSlice []*TargetDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice TargetDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for target_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for target_dictionary") + } + + if len(bindingSiteAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.TidTargetDictionary = foreign + if foreign.R == nil { + foreign.R = &targetDictionaryR{} + } + foreign.R.TidBindingSites = 
append(foreign.R.TidBindingSites, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.Tid, foreign.Tid) { + local.R.TidTargetDictionary = foreign + if foreign.R == nil { + foreign.R = &targetDictionaryR{} + } + foreign.R.TidBindingSites = append(foreign.R.TidBindingSites, local) + break + } + } + } + + return nil +} + +// LoadSiteDrugMechanisms allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (bindingSiteL) LoadSiteDrugMechanisms(ctx context.Context, e boil.ContextExecutor, singular bool, maybeBindingSite interface{}, mods queries.Applicator) error { + var slice []*BindingSite + var object *BindingSite + + if singular { + object = maybeBindingSite.(*BindingSite) + } else { + slice = *maybeBindingSite.(*[]*BindingSite) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &bindingSiteR{} + } + args = append(args, object.SiteID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &bindingSiteR{} + } + + for _, a := range args { + if queries.Equal(a, obj.SiteID) { + continue Outer + } + } + + args = append(args, obj.SiteID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`drug_mechanism`), + qm.WhereIn(`drug_mechanism.site_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load drug_mechanism") + } + + var resultSlice []*DrugMechanism + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice drug_mechanism") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on drug_mechanism") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during 
iteration of eager loaded relations for drug_mechanism") + } + + if len(drugMechanismAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.SiteDrugMechanisms = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &drugMechanismR{} + } + foreign.R.Site = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.SiteID, foreign.SiteID) { + local.R.SiteDrugMechanisms = append(local.R.SiteDrugMechanisms, foreign) + if foreign.R == nil { + foreign.R = &drugMechanismR{} + } + foreign.R.Site = local + break + } + } + } + + return nil +} + +// LoadSitePredictedBindingDomains allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (bindingSiteL) LoadSitePredictedBindingDomains(ctx context.Context, e boil.ContextExecutor, singular bool, maybeBindingSite interface{}, mods queries.Applicator) error { + var slice []*BindingSite + var object *BindingSite + + if singular { + object = maybeBindingSite.(*BindingSite) + } else { + slice = *maybeBindingSite.(*[]*BindingSite) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &bindingSiteR{} + } + args = append(args, object.SiteID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &bindingSiteR{} + } + + for _, a := range args { + if queries.Equal(a, obj.SiteID) { + continue Outer + } + } + + args = append(args, obj.SiteID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`predicted_binding_domains`), + qm.WhereIn(`predicted_binding_domains.site_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load 
predicted_binding_domains") + } + + var resultSlice []*PredictedBindingDomain + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice predicted_binding_domains") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on predicted_binding_domains") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for predicted_binding_domains") + } + + if len(predictedBindingDomainAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.SitePredictedBindingDomains = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &predictedBindingDomainR{} + } + foreign.R.Site = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.SiteID, foreign.SiteID) { + local.R.SitePredictedBindingDomains = append(local.R.SitePredictedBindingDomains, foreign) + if foreign.R == nil { + foreign.R = &predictedBindingDomainR{} + } + foreign.R.Site = local + break + } + } + } + + return nil +} + +// LoadSiteSiteComponents allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (bindingSiteL) LoadSiteSiteComponents(ctx context.Context, e boil.ContextExecutor, singular bool, maybeBindingSite interface{}, mods queries.Applicator) error { + var slice []*BindingSite + var object *BindingSite + + if singular { + object = maybeBindingSite.(*BindingSite) + } else { + slice = *maybeBindingSite.(*[]*BindingSite) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &bindingSiteR{} + } + args = append(args, object.SiteID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &bindingSiteR{} + } + + for _, a := range args { + if a == obj.SiteID { + continue Outer + } + } + + args = append(args, obj.SiteID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`site_components`), + qm.WhereIn(`site_components.site_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load site_components") + } + + var resultSlice []*SiteComponent + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice site_components") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on site_components") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for site_components") + } + + if len(siteComponentAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.SiteSiteComponents = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &siteComponentR{} + } + foreign.R.Site = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.SiteID == foreign.SiteID { + 
local.R.SiteSiteComponents = append(local.R.SiteSiteComponents, foreign) + if foreign.R == nil { + foreign.R = &siteComponentR{} + } + foreign.R.Site = local + break + } + } + } + + return nil +} + +// SetTidTargetDictionary of the bindingSite to the related item. +// Sets o.R.TidTargetDictionary to related. +// Adds o to related.R.TidBindingSites. +func (o *BindingSite) SetTidTargetDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *TargetDictionary) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"binding_sites\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"tid"}), + strmangle.WhereClause("\"", "\"", 0, bindingSitePrimaryKeyColumns), + ) + values := []interface{}{related.Tid, o.SiteID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.Tid, related.Tid) + if o.R == nil { + o.R = &bindingSiteR{ + TidTargetDictionary: related, + } + } else { + o.R.TidTargetDictionary = related + } + + if related.R == nil { + related.R = &targetDictionaryR{ + TidBindingSites: BindingSiteSlice{o}, + } + } else { + related.R.TidBindingSites = append(related.R.TidBindingSites, o) + } + + return nil +} + +// RemoveTidTargetDictionary relationship. +// Sets o.R.TidTargetDictionary to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *BindingSite) RemoveTidTargetDictionary(ctx context.Context, exec boil.ContextExecutor, related *TargetDictionary) error { + var err error + + queries.SetScanner(&o.Tid, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("tid")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.TidTargetDictionary = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.TidBindingSites { + if queries.Equal(o.Tid, ri.Tid) { + continue + } + + ln := len(related.R.TidBindingSites) + if ln > 1 && i < ln-1 { + related.R.TidBindingSites[i] = related.R.TidBindingSites[ln-1] + } + related.R.TidBindingSites = related.R.TidBindingSites[:ln-1] + break + } + return nil +} + +// AddSiteDrugMechanisms adds the given related objects to the existing relationships +// of the binding_site, optionally inserting them as new records. +// Appends related to o.R.SiteDrugMechanisms. +// Sets related.R.Site appropriately. 
+func (o *BindingSite) AddSiteDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugMechanism) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.SiteID, o.SiteID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"drug_mechanism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"site_id"}), + strmangle.WhereClause("\"", "\"", 0, drugMechanismPrimaryKeyColumns), + ) + values := []interface{}{o.SiteID, rel.MecID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.SiteID, o.SiteID) + } + } + + if o.R == nil { + o.R = &bindingSiteR{ + SiteDrugMechanisms: related, + } + } else { + o.R.SiteDrugMechanisms = append(o.R.SiteDrugMechanisms, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &drugMechanismR{ + Site: o, + } + } else { + rel.R.Site = o + } + } + return nil +} + +// SetSiteDrugMechanisms removes all previously related items of the +// binding_site replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.Site's SiteDrugMechanisms accordingly. +// Replaces o.R.SiteDrugMechanisms with related. +// Sets related.R.Site's SiteDrugMechanisms accordingly. +func (o *BindingSite) SetSiteDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugMechanism) error { + query := "update \"drug_mechanism\" set \"site_id\" = null where \"site_id\" = ?" 
+ values := []interface{}{o.SiteID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.SiteDrugMechanisms { + queries.SetScanner(&rel.SiteID, nil) + if rel.R == nil { + continue + } + + rel.R.Site = nil + } + o.R.SiteDrugMechanisms = nil + } + + return o.AddSiteDrugMechanisms(ctx, exec, insert, related...) +} + +// RemoveSiteDrugMechanisms relationships from objects passed in. +// Removes related items from R.SiteDrugMechanisms (uses pointer comparison, removal does not keep order) +// Sets related.R.Site. +func (o *BindingSite) RemoveSiteDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, related ...*DrugMechanism) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.SiteID, nil) + if rel.R != nil { + rel.R.Site = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("site_id")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.SiteDrugMechanisms { + if rel != ri { + continue + } + + ln := len(o.R.SiteDrugMechanisms) + if ln > 1 && i < ln-1 { + o.R.SiteDrugMechanisms[i] = o.R.SiteDrugMechanisms[ln-1] + } + o.R.SiteDrugMechanisms = o.R.SiteDrugMechanisms[:ln-1] + break + } + } + + return nil +} + +// AddSitePredictedBindingDomains adds the given related objects to the existing relationships +// of the binding_site, optionally inserting them as new records. +// Appends related to o.R.SitePredictedBindingDomains. +// Sets related.R.Site appropriately. 
func (o *BindingSite) AddSitePredictedBindingDomains(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*PredictedBindingDomain) error {
	var err error
	for _, rel := range related {
		if insert {
			// New row: set the FK first, then insert.
			queries.Assign(&rel.SiteID, o.SiteID)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: re-point site_id with a direct UPDATE keyed by PK.
			updateQuery := fmt.Sprintf(
				"UPDATE \"predicted_binding_domains\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"site_id"}),
				strmangle.WhereClause("\"", "\"", 0, predictedBindingDomainPrimaryKeyColumns),
			)
			values := []interface{}{o.SiteID, rel.PredbindID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.SiteID, o.SiteID)
		}
	}

	// Mirror the new links in the in-memory relationship struct on o.
	if o.R == nil {
		o.R = &bindingSiteR{
			SitePredictedBindingDomains: related,
		}
	} else {
		o.R.SitePredictedBindingDomains = append(o.R.SitePredictedBindingDomains, related...)
	}

	// Set the back-reference on each related object.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &predictedBindingDomainR{
				Site: o,
			}
		} else {
			rel.R.Site = o
		}
	}
	return nil
}

// SetSitePredictedBindingDomains removes all previously related items of the
// binding_site replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.Site's SitePredictedBindingDomains accordingly.
// Replaces o.R.SitePredictedBindingDomains with related.
// Sets related.R.Site's SitePredictedBindingDomains accordingly.
func (o *BindingSite) SetSitePredictedBindingDomains(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*PredictedBindingDomain) error {
	// Unlink every currently related row by nulling its site_id, then delegate
	// to AddSitePredictedBindingDomains for the replacement set.
	query := "update \"predicted_binding_domains\" set \"site_id\" = null where \"site_id\" = ?"
	values := []interface{}{o.SiteID}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	// Clear the in-memory side of the old links as well.
	if o.R != nil {
		for _, rel := range o.R.SitePredictedBindingDomains {
			queries.SetScanner(&rel.SiteID, nil)
			if rel.R == nil {
				continue
			}

			rel.R.Site = nil
		}
		o.R.SitePredictedBindingDomains = nil
	}

	return o.AddSitePredictedBindingDomains(ctx, exec, insert, related...)
}

// RemoveSitePredictedBindingDomains relationships from objects passed in.
// Removes related items from R.SitePredictedBindingDomains (uses pointer comparison, removal does not keep order)
// Sets related.R.Site.
func (o *BindingSite) RemoveSitePredictedBindingDomains(ctx context.Context, exec boil.ContextExecutor, related ...*PredictedBindingDomain) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		// Null the FK in the database and drop the in-memory back-reference.
		queries.SetScanner(&rel.SiteID, nil)
		if rel.R != nil {
			rel.R.Site = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("site_id")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	for _, rel := range related {
		for i, ri := range o.R.SitePredictedBindingDomains {
			if rel != ri {
				continue
			}

			// Swap-with-last removal; slice order is not preserved.
			ln := len(o.R.SitePredictedBindingDomains)
			if ln > 1 && i < ln-1 {
				o.R.SitePredictedBindingDomains[i] = o.R.SitePredictedBindingDomains[ln-1]
			}
			o.R.SitePredictedBindingDomains = o.R.SitePredictedBindingDomains[:ln-1]
			break
		}
	}

	return nil
}

// AddSiteSiteComponents adds the given related objects to the existing relationships
// of the binding_site, optionally inserting them as new records.
// Appends related to o.R.SiteSiteComponents.
// Sets related.R.Site appropriately.
func (o *BindingSite) AddSiteSiteComponents(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*SiteComponent) error {
	var err error
	for _, rel := range related {
		if insert {
			// site_id here is a plain (non-nullable) field, hence direct
			// assignment rather than queries.Assign as in the sibling Add methods.
			rel.SiteID = o.SiteID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: re-point site_id with a direct UPDATE keyed by PK.
			updateQuery := fmt.Sprintf(
				"UPDATE \"site_components\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"site_id"}),
				strmangle.WhereClause("\"", "\"", 0, siteComponentPrimaryKeyColumns),
			)
			values := []interface{}{o.SiteID, rel.SitecompID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.SiteID = o.SiteID
		}
	}

	// Mirror the new links in the in-memory relationship struct on o.
	if o.R == nil {
		o.R = &bindingSiteR{
			SiteSiteComponents: related,
		}
	} else {
		o.R.SiteSiteComponents = append(o.R.SiteSiteComponents, related...)
	}

	// Set the back-reference on each related object.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &siteComponentR{
				Site: o,
			}
		} else {
			rel.R.Site = o
		}
	}
	return nil
}

// BindingSites retrieves all the records using an executor.
func BindingSites(mods ...qm.QueryMod) bindingSiteQuery {
	mods = append(mods, qm.From("\"binding_sites\""))
	q := NewQuery(mods...)
	// Default to selecting every column unless the caller supplied a qm.Select.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"binding_sites\".*"})
	}

	return bindingSiteQuery{q}
}

// FindBindingSite retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindBindingSite(ctx context.Context, exec boil.ContextExecutor, siteID int64, selectCols ...string) (*BindingSite, error) {
	bindingSiteObj := &BindingSite{}

	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"binding_sites\" where \"site_id\"=?", sel,
	)

	q := queries.Raw(query, siteID)

	err := q.Bind(ctx, exec, bindingSiteObj)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			// Surface the bare sentinel so callers can errors.Is against it.
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from binding_sites")
	}

	if err = bindingSiteObj.doAfterSelectHooks(ctx, exec); err != nil {
		return bindingSiteObj, err
	}

	return bindingSiteObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *BindingSite) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no binding_sites provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(bindingSiteColumnsWithDefault, o)

	// Built queries are cached per (column set, non-zero defaults) key.
	key := makeCacheKey(columns, nzDefaults)
	bindingSiteInsertCacheMut.RLock()
	cache, cached := bindingSiteInsertCache[key]
	bindingSiteInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			bindingSiteAllColumns,
			bindingSiteColumnsWithDefault,
			bindingSiteColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(bindingSiteType, bindingSiteMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(bindingSiteType, bindingSiteMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			// %%s placeholders are filled below with output/RETURNING fragments.
			cache.query = fmt.Sprintf("INSERT INTO \"binding_sites\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"binding_sites\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	if len(cache.retMapping) != 0 {
		// RETURNING clause present: scan DB-generated values back into o.
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into binding_sites")
	}

	if !cached {
		bindingSiteInsertCacheMut.Lock()
		bindingSiteInsertCache[key] = cache
		bindingSiteInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the BindingSite.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *BindingSite) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	// Built queries are cached per column-set key.
	key := makeCacheKey(columns, nil)
	bindingSiteUpdateCacheMut.RLock()
	cache, cached := bindingSiteUpdateCache[key]
	bindingSiteUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			bindingSiteAllColumns,
			bindingSitePrimaryKeyColumns,
		)

		// created_at is never updated unless explicitly whitelisted.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update binding_sites, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"binding_sites\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, bindingSitePrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(bindingSiteType, bindingSiteMapping, append(wl, bindingSitePrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update binding_sites row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for binding_sites")
	}

	if !cached {
		bindingSiteUpdateCacheMut.Lock()
		bindingSiteUpdateCache[key] = cache
		bindingSiteUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q bindingSiteQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for binding_sites")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for binding_sites")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o BindingSiteSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), bindingSitePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"binding_sites\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, bindingSitePrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in bindingSite slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all bindingSite")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *BindingSite) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no binding_sites provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(bindingSiteColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	bindingSiteUpsertCacheMut.RLock()
	cache, cached := bindingSiteUpsertCache[key]
	bindingSiteUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			bindingSiteAllColumns,
			bindingSiteColumnsWithDefault,
			bindingSiteColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			bindingSiteAllColumns,
			bindingSitePrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert binding_sites, could not build update column list")
		}

		// Default the conflict target to the primary key.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(bindingSitePrimaryKeyColumns))
			copy(conflict, bindingSitePrimaryKeyColumns)
		}
		// SQLite dialect: ON CONFLICT clause built by buildUpsertQuerySQLite.
		cache.query = buildUpsertQuerySQLite(dialect, "\"binding_sites\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(bindingSiteType, bindingSiteMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(bindingSiteType, bindingSiteMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert binding_sites")
	}

	if !cached {
		bindingSiteUpsertCacheMut.Lock()
		bindingSiteUpsertCache[key] = cache
		bindingSiteUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single BindingSite record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *BindingSite) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no BindingSite provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), bindingSitePrimaryKeyMapping)
	sql := "DELETE FROM \"binding_sites\" WHERE \"site_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from binding_sites")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for binding_sites")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q bindingSiteQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no bindingSiteQuery provided for delete all")
	}

	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from binding_sites")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for binding_sites")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o BindingSiteSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Run before-delete hooks only if any are registered.
	if len(bindingSiteBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), bindingSitePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// One DELETE with a repeated PK WHERE clause covering every slice element.
	sql := "DELETE FROM \"binding_sites\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, bindingSitePrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from bindingSite slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for binding_sites")
	}

	if len(bindingSiteAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *BindingSite) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindBindingSite(ctx, exec, o.SiteID)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place with the freshly fetched row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *BindingSiteSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := BindingSiteSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), bindingSitePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// One SELECT with a repeated PK WHERE clause covering every slice element.
	sql := "SELECT \"binding_sites\".* FROM \"binding_sites\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, bindingSitePrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in BindingSiteSlice")
	}

	// NOTE(review): the reloaded slice replaces the original wholesale; rows
	// deleted in the DB since the original fetch will simply be absent.
	*o = slice

	return nil
}

// BindingSiteExists checks if the BindingSite row exists.
func BindingSiteExists(ctx context.Context, exec boil.ContextExecutor, siteID int64) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"binding_sites\" where \"site_id\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, siteID)
	}
	row := exec.QueryRowContext(ctx, sql, siteID)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if binding_sites exists")
	}

	return exists, nil
}
diff --git a/models/bio_component_sequences.go b/models/bio_component_sequences.go
new file mode 100644
index 0000000..bfa0819
--- /dev/null
+++ b/models/bio_component_sequences.go
@@ -0,0 +1,1101 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// BioComponentSequence is an object representing the database table.
type BioComponentSequence struct {
	ComponentID    int64       `boil:"component_id" json:"component_id" toml:"component_id" yaml:"component_id"`
	ComponentType  string      `boil:"component_type" json:"component_type" toml:"component_type" yaml:"component_type"`
	Description    null.String `boil:"description" json:"description,omitempty" toml:"description" yaml:"description,omitempty"`
	Sequence       null.String `boil:"sequence" json:"sequence,omitempty" toml:"sequence" yaml:"sequence,omitempty"`
	SequenceMd5sum null.String `boil:"sequence_md5sum" json:"sequence_md5sum,omitempty" toml:"sequence_md5sum" yaml:"sequence_md5sum,omitempty"`
	TaxID          null.Int64  `boil:"tax_id" json:"tax_id,omitempty" toml:"tax_id" yaml:"tax_id,omitempty"`
	Organism       null.String `boil:"organism" json:"organism,omitempty" toml:"organism" yaml:"organism,omitempty"`

	R *bioComponentSequenceR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L bioComponentSequenceL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// BioComponentSequenceColumns holds the bare column names.
var BioComponentSequenceColumns = struct {
	ComponentID    string
	ComponentType  string
	Description    string
	Sequence       string
	SequenceMd5sum string
	TaxID          string
	Organism       string
}{
	ComponentID:    "component_id",
	ComponentType:  "component_type",
	Description:    "description",
	Sequence:       "sequence",
	SequenceMd5sum: "sequence_md5sum",
	TaxID:          "tax_id",
	Organism:       "organism",
}

// BioComponentSequenceTableColumns holds table-qualified column names.
var BioComponentSequenceTableColumns = struct {
	ComponentID    string
	ComponentType  string
	Description    string
	Sequence       string
	SequenceMd5sum string
	TaxID          string
	Organism       string
}{
	ComponentID:    "bio_component_sequences.component_id",
	ComponentType:  "bio_component_sequences.component_type",
	Description:    "bio_component_sequences.description",
	Sequence:       "bio_component_sequences.sequence",
	SequenceMd5sum: "bio_component_sequences.sequence_md5sum",
	TaxID:          "bio_component_sequences.tax_id",
	Organism:       "bio_component_sequences.organism",
}

// Generated where

var BioComponentSequenceWhere = struct {
	ComponentID    whereHelperint64
	ComponentType  whereHelperstring
	Description    whereHelpernull_String
	Sequence       whereHelpernull_String
	SequenceMd5sum whereHelpernull_String
	TaxID          whereHelpernull_Int64
	Organism       whereHelpernull_String
}{
	ComponentID:    whereHelperint64{field: "\"bio_component_sequences\".\"component_id\""},
	ComponentType:  whereHelperstring{field: "\"bio_component_sequences\".\"component_type\""},
	Description:    whereHelpernull_String{field: "\"bio_component_sequences\".\"description\""},
	Sequence:       whereHelpernull_String{field: "\"bio_component_sequences\".\"sequence\""},
	SequenceMd5sum: whereHelpernull_String{field: "\"bio_component_sequences\".\"sequence_md5sum\""},
	TaxID:          whereHelpernull_Int64{field: "\"bio_component_sequences\".\"tax_id\""},
	Organism:       whereHelpernull_String{field: "\"bio_component_sequences\".\"organism\""},
}

// BioComponentSequenceRels is where relationship names are stored.
var BioComponentSequenceRels = struct {
	ComponentBiotherapeuticComponents string
}{
	ComponentBiotherapeuticComponents: "ComponentBiotherapeuticComponents",
}

// bioComponentSequenceR is where relationships are stored.
type bioComponentSequenceR struct {
	ComponentBiotherapeuticComponents BiotherapeuticComponentSlice `boil:"ComponentBiotherapeuticComponents" json:"ComponentBiotherapeuticComponents" toml:"ComponentBiotherapeuticComponents" yaml:"ComponentBiotherapeuticComponents"`
}

// NewStruct creates a new relationship struct
func (*bioComponentSequenceR) NewStruct() *bioComponentSequenceR {
	return &bioComponentSequenceR{}
}

// GetComponentBiotherapeuticComponents is a nil-safe accessor for the loaded relationship.
func (r *bioComponentSequenceR) GetComponentBiotherapeuticComponents() BiotherapeuticComponentSlice {
	if r == nil {
		return nil
	}
	return r.ComponentBiotherapeuticComponents
}

// bioComponentSequenceL is where Load methods for each relationship are stored.
type bioComponentSequenceL struct{}

var (
	bioComponentSequenceAllColumns            = []string{"component_id", "component_type", "description", "sequence", "sequence_md5sum", "tax_id", "organism"}
	bioComponentSequenceColumnsWithoutDefault = []string{"component_id", "component_type"}
	bioComponentSequenceColumnsWithDefault    = []string{"description", "sequence", "sequence_md5sum", "tax_id", "organism"}
	bioComponentSequencePrimaryKeyColumns     = []string{"component_id"}
	bioComponentSequenceGeneratedColumns      = []string{}
)

type (
	// BioComponentSequenceSlice is an alias for a slice of pointers to BioComponentSequence.
	// This should almost always be used instead of []BioComponentSequence.
	BioComponentSequenceSlice []*BioComponentSequence
	// BioComponentSequenceHook is the signature for custom BioComponentSequence hook methods
	BioComponentSequenceHook func(context.Context, boil.ContextExecutor, *BioComponentSequence) error

	bioComponentSequenceQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	bioComponentSequenceType                 = reflect.TypeOf(&BioComponentSequence{})
	bioComponentSequenceMapping              = queries.MakeStructMapping(bioComponentSequenceType)
	bioComponentSequencePrimaryKeyMapping, _ = queries.BindMapping(bioComponentSequenceType, bioComponentSequenceMapping, bioComponentSequencePrimaryKeyColumns)
	bioComponentSequenceInsertCacheMut       sync.RWMutex
	bioComponentSequenceInsertCache          = make(map[string]insertCache)
	bioComponentSequenceUpdateCacheMut       sync.RWMutex
	bioComponentSequenceUpdateCache          = make(map[string]updateCache)
	bioComponentSequenceUpsertCacheMut       sync.RWMutex
	bioComponentSequenceUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Registered hook functions, grouped by hook point.
var bioComponentSequenceAfterSelectHooks []BioComponentSequenceHook

var bioComponentSequenceBeforeInsertHooks []BioComponentSequenceHook
var bioComponentSequenceAfterInsertHooks []BioComponentSequenceHook

var bioComponentSequenceBeforeUpdateHooks []BioComponentSequenceHook
var bioComponentSequenceAfterUpdateHooks []BioComponentSequenceHook

var bioComponentSequenceBeforeDeleteHooks []BioComponentSequenceHook
var bioComponentSequenceAfterDeleteHooks []BioComponentSequenceHook

var bioComponentSequenceBeforeUpsertHooks []BioComponentSequenceHook
var bioComponentSequenceAfterUpsertHooks []BioComponentSequenceHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *BioComponentSequence) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be disabled per-context via boil.SkipHooks.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioComponentSequenceAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *BioComponentSequence) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioComponentSequenceBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *BioComponentSequence) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioComponentSequenceAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *BioComponentSequence) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioComponentSequenceBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *BioComponentSequence) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioComponentSequenceAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *BioComponentSequence) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be disabled per-context via boil.SkipHooks.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioComponentSequenceBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *BioComponentSequence) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioComponentSequenceAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *BioComponentSequence) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioComponentSequenceBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *BioComponentSequence) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioComponentSequenceAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddBioComponentSequenceHook registers your hook function for all future operations.
+func AddBioComponentSequenceHook(hookPoint boil.HookPoint, bioComponentSequenceHook BioComponentSequenceHook) { + switch hookPoint { + case boil.AfterSelectHook: + bioComponentSequenceAfterSelectHooks = append(bioComponentSequenceAfterSelectHooks, bioComponentSequenceHook) + case boil.BeforeInsertHook: + bioComponentSequenceBeforeInsertHooks = append(bioComponentSequenceBeforeInsertHooks, bioComponentSequenceHook) + case boil.AfterInsertHook: + bioComponentSequenceAfterInsertHooks = append(bioComponentSequenceAfterInsertHooks, bioComponentSequenceHook) + case boil.BeforeUpdateHook: + bioComponentSequenceBeforeUpdateHooks = append(bioComponentSequenceBeforeUpdateHooks, bioComponentSequenceHook) + case boil.AfterUpdateHook: + bioComponentSequenceAfterUpdateHooks = append(bioComponentSequenceAfterUpdateHooks, bioComponentSequenceHook) + case boil.BeforeDeleteHook: + bioComponentSequenceBeforeDeleteHooks = append(bioComponentSequenceBeforeDeleteHooks, bioComponentSequenceHook) + case boil.AfterDeleteHook: + bioComponentSequenceAfterDeleteHooks = append(bioComponentSequenceAfterDeleteHooks, bioComponentSequenceHook) + case boil.BeforeUpsertHook: + bioComponentSequenceBeforeUpsertHooks = append(bioComponentSequenceBeforeUpsertHooks, bioComponentSequenceHook) + case boil.AfterUpsertHook: + bioComponentSequenceAfterUpsertHooks = append(bioComponentSequenceAfterUpsertHooks, bioComponentSequenceHook) + } +} + +// One returns a single bioComponentSequence record from the query. 
+func (q bioComponentSequenceQuery) One(ctx context.Context, exec boil.ContextExecutor) (*BioComponentSequence, error) { + o := &BioComponentSequence{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for bio_component_sequences") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all BioComponentSequence records from the query. +func (q bioComponentSequenceQuery) All(ctx context.Context, exec boil.ContextExecutor) (BioComponentSequenceSlice, error) { + var o []*BioComponentSequence + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to BioComponentSequence slice") + } + + if len(bioComponentSequenceAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all BioComponentSequence records in the query. +func (q bioComponentSequenceQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count bio_component_sequences rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. 
+func (q bioComponentSequenceQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if bio_component_sequences exists") + } + + return count > 0, nil +} + +// ComponentBiotherapeuticComponents retrieves all the biotherapeutic_component's BiotherapeuticComponents with an executor via component_id column. +func (o *BioComponentSequence) ComponentBiotherapeuticComponents(mods ...qm.QueryMod) biotherapeuticComponentQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"biotherapeutic_components\".\"component_id\"=?", o.ComponentID), + ) + + return BiotherapeuticComponents(queryMods...) +} + +// LoadComponentBiotherapeuticComponents allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
func (bioComponentSequenceL) LoadComponentBiotherapeuticComponents(ctx context.Context, e boil.ContextExecutor, singular bool, maybeBioComponentSequence interface{}, mods queries.Applicator) error {
	// Eager loader: accepts either a single *BioComponentSequence (singular)
	// or a *[]*BioComponentSequence, fetches all related rows in one query,
	// and attaches them to each parent's R struct.
	var slice []*BioComponentSequence
	var object *BioComponentSequence

	if singular {
		object = maybeBioComponentSequence.(*BioComponentSequence)
	} else {
		slice = *maybeBioComponentSequence.(*[]*BioComponentSequence)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &bioComponentSequenceR{}
		}
		args = append(args, object.ComponentID)
	} else {
	Outer:
		// Deduplicate ComponentID values so the IN clause stays minimal.
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &bioComponentSequenceR{}
			}

			for _, a := range args {
				if a == obj.ComponentID {
					continue Outer
				}
			}

			args = append(args, obj.ComponentID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`biotherapeutic_components`),
		qm.WhereIn(`biotherapeutic_components.component_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load biotherapeutic_components")
	}

	var resultSlice []*BiotherapeuticComponent
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice biotherapeutic_components")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on biotherapeutic_components")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for biotherapeutic_components")
	}

	if len(biotherapeuticComponentAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.ComponentBiotherapeuticComponents = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &biotherapeuticComponentR{}
			}
			// Back-link each child to its parent for two-way navigation.
			foreign.R.Component = object
		}
		return nil
	}

	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.ComponentID == foreign.ComponentID {
				local.R.ComponentBiotherapeuticComponents = append(local.R.ComponentBiotherapeuticComponents, foreign)
				if foreign.R == nil {
					foreign.R = &biotherapeuticComponentR{}
				}
				foreign.R.Component = local
				break
			}
		}
	}

	return nil
}

// AddComponentBiotherapeuticComponents adds the given related objects to the existing relationships
// of the bio_component_sequence, optionally inserting them as new records.
// Appends related to o.R.ComponentBiotherapeuticComponents.
// Sets related.R.Component appropriately.
func (o *BioComponentSequence) AddComponentBiotherapeuticComponents(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*BiotherapeuticComponent) error {
	var err error
	for _, rel := range related {
		if insert {
			// New record: stamp the FK before inserting.
			rel.ComponentID = o.ComponentID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing record: re-point its FK with a direct UPDATE.
			updateQuery := fmt.Sprintf(
				"UPDATE \"biotherapeutic_components\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"component_id"}),
				strmangle.WhereClause("\"", "\"", 0, biotherapeuticComponentPrimaryKeyColumns),
			)
			values := []interface{}{o.ComponentID, rel.BiocompID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.ComponentID = o.ComponentID
		}
	}

	if o.R == nil {
		o.R = &bioComponentSequenceR{
			ComponentBiotherapeuticComponents: related,
		}
	} else {
		o.R.ComponentBiotherapeuticComponents = append(o.R.ComponentBiotherapeuticComponents, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &biotherapeuticComponentR{
				Component: o,
			}
		} else {
			rel.R.Component = o
		}
	}
	return nil
}

// BioComponentSequences retrieves all the records using an executor.
func BioComponentSequences(mods ...qm.QueryMod) bioComponentSequenceQuery {
	mods = append(mods, qm.From("\"bio_component_sequences\""))
	q := NewQuery(mods...)
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"bio_component_sequences\".*"})
	}

	return bioComponentSequenceQuery{q}
}

// FindBioComponentSequence retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindBioComponentSequence(ctx context.Context, exec boil.ContextExecutor, componentID int64, selectCols ...string) (*BioComponentSequence, error) {
	bioComponentSequenceObj := &BioComponentSequence{}

	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"bio_component_sequences\" where \"component_id\"=?", sel,
	)

	q := queries.Raw(query, componentID)

	err := q.Bind(ctx, exec, bioComponentSequenceObj)
	if err != nil {
		// Pass sql.ErrNoRows through unwrapped so callers can errors.Is on it.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from bio_component_sequences")
	}

	if err = bioComponentSequenceObj.doAfterSelectHooks(ctx, exec); err != nil {
		return bioComponentSequenceObj, err
	}

	return bioComponentSequenceObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *BioComponentSequence) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no bio_component_sequences provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(bioComponentSequenceColumnsWithDefault, o)

	// The generated SQL and struct/column mappings are cached per column-set
	// key so repeated inserts with the same shape skip query building.
	key := makeCacheKey(columns, nzDefaults)
	bioComponentSequenceInsertCacheMut.RLock()
	cache, cached := bioComponentSequenceInsertCache[key]
	bioComponentSequenceInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			bioComponentSequenceAllColumns,
			bioComponentSequenceColumnsWithDefault,
			bioComponentSequenceColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(bioComponentSequenceType, bioComponentSequenceMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(bioComponentSequenceType, bioComponentSequenceMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"bio_component_sequences\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"bio_component_sequences\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	if len(cache.retMapping) != 0 {
		// RETURNING columns (e.g. defaults) are scanned back into the struct.
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into bio_component_sequences")
	}

	if !cached {
		bioComponentSequenceInsertCacheMut.Lock()
		bioComponentSequenceInsertCache[key] = cache
		bioComponentSequenceInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the BioComponentSequence.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *BioComponentSequence) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	bioComponentSequenceUpdateCacheMut.RLock()
	cache, cached := bioComponentSequenceUpdateCache[key]
	bioComponentSequenceUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			bioComponentSequenceAllColumns,
			bioComponentSequencePrimaryKeyColumns,
		)

		if !columns.IsWhitelist() {
			// Never overwrite created_at unless explicitly whitelisted.
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update bio_component_sequences, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"bio_component_sequences\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, bioComponentSequencePrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(bioComponentSequenceType, bioComponentSequenceMapping, append(wl, bioComponentSequencePrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update bio_component_sequences row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for bio_component_sequences")
	}

	if !cached {
		bioComponentSequenceUpdateCacheMut.Lock()
		bioComponentSequenceUpdateCache[key] = cache
		bioComponentSequenceUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q bioComponentSequenceQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for bio_component_sequences")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for bio_component_sequences")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
+func (o BioComponentSequenceSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), bioComponentSequencePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"bio_component_sequences\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, bioComponentSequencePrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in bioComponentSequence slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all bioComponentSequence") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. 
func (o *BioComponentSequence) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no bio_component_sequences provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(bioComponentSequenceColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	// (every input that affects the generated SQL is folded into the key)
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	bioComponentSequenceUpsertCacheMut.RLock()
	cache, cached := bioComponentSequenceUpsertCache[key]
	bioComponentSequenceUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			bioComponentSequenceAllColumns,
			bioComponentSequenceColumnsWithDefault,
			bioComponentSequenceColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			bioComponentSequenceAllColumns,
			bioComponentSequencePrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert bio_component_sequences, could not build update column list")
		}

		// Conflict target defaults to the primary key when not supplied.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(bioComponentSequencePrimaryKeyColumns))
			copy(conflict, bioComponentSequencePrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"bio_component_sequences\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(bioComponentSequenceType, bioComponentSequenceMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(bioComponentSequenceType, bioComponentSequenceMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert bio_component_sequences")
	}

	if !cached {
		bioComponentSequenceUpsertCacheMut.Lock()
		bioComponentSequenceUpsertCache[key] = cache
		bioComponentSequenceUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single BioComponentSequence record with an executor.
// Delete will match against the primary key column to find the record to delete.
+func (o *BioComponentSequence) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no BioComponentSequence provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), bioComponentSequencePrimaryKeyMapping) + sql := "DELETE FROM \"bio_component_sequences\" WHERE \"component_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from bio_component_sequences") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for bio_component_sequences") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q bioComponentSequenceQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no bioComponentSequenceQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from bio_component_sequences") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for bio_component_sequences") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o BioComponentSequenceSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(bioComponentSequenceBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), bioComponentSequencePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"bio_component_sequences\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, bioComponentSequencePrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from bioComponentSequence slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for bio_component_sequences") + } + + if len(bioComponentSequenceAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *BioComponentSequence) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindBioComponentSequence(ctx, exec, o.ComponentID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *BioComponentSequenceSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := BioComponentSequenceSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), bioComponentSequencePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"bio_component_sequences\".* FROM \"bio_component_sequences\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, bioComponentSequencePrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in BioComponentSequenceSlice") + } + + *o = slice + + return nil +} + +// BioComponentSequenceExists checks if the BioComponentSequence row exists. +func BioComponentSequenceExists(ctx context.Context, exec boil.ContextExecutor, componentID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"bio_component_sequences\" where \"component_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, componentID) + } + row := exec.QueryRowContext(ctx, sql, componentID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if bio_component_sequences exists") + } + + return exists, nil +} diff --git a/models/bioassay_ontology.go b/models/bioassay_ontology.go new file mode 100644 index 0000000..c6f971a --- /dev/null +++ b/models/bioassay_ontology.go @@ -0,0 +1,1388 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// BioassayOntology is an object representing the database table. +type BioassayOntology struct { + BaoID string `boil:"bao_id" json:"bao_id" toml:"bao_id" yaml:"bao_id"` + Label string `boil:"label" json:"label" toml:"label" yaml:"label"` + + R *bioassayOntologyR `boil:"-" json:"-" toml:"-" yaml:"-"` + L bioassayOntologyL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var BioassayOntologyColumns = struct { + BaoID string + Label string +}{ + BaoID: "bao_id", + Label: "label", +} + +var BioassayOntologyTableColumns = struct { + BaoID string + Label string +}{ + BaoID: "bioassay_ontology.bao_id", + Label: "bioassay_ontology.label", +} + +// Generated where + +var BioassayOntologyWhere = struct { + BaoID whereHelperstring + Label whereHelperstring +}{ + BaoID: whereHelperstring{field: "\"bioassay_ontology\".\"bao_id\""}, + Label: whereHelperstring{field: "\"bioassay_ontology\".\"label\""}, +} + +// BioassayOntologyRels is where relationship names are stored. +var BioassayOntologyRels = struct { + BaoEndpointActivities string + BaoFormatAssays string +}{ + BaoEndpointActivities: "BaoEndpointActivities", + BaoFormatAssays: "BaoFormatAssays", +} + +// bioassayOntologyR is where relationships are stored. 
type bioassayOntologyR struct {
	BaoEndpointActivities ActivitySlice `boil:"BaoEndpointActivities" json:"BaoEndpointActivities" toml:"BaoEndpointActivities" yaml:"BaoEndpointActivities"`
	BaoFormatAssays       AssaySlice    `boil:"BaoFormatAssays" json:"BaoFormatAssays" toml:"BaoFormatAssays" yaml:"BaoFormatAssays"`
}

// NewStruct creates a new relationship struct
func (*bioassayOntologyR) NewStruct() *bioassayOntologyR {
	return &bioassayOntologyR{}
}

// Nil-safe accessor: returns nil when no relationships are loaded.
func (r *bioassayOntologyR) GetBaoEndpointActivities() ActivitySlice {
	if r == nil {
		return nil
	}
	return r.BaoEndpointActivities
}

// Nil-safe accessor: returns nil when no relationships are loaded.
func (r *bioassayOntologyR) GetBaoFormatAssays() AssaySlice {
	if r == nil {
		return nil
	}
	return r.BaoFormatAssays
}

// bioassayOntologyL is where Load methods for each relationship are stored.
type bioassayOntologyL struct{}

var (
	bioassayOntologyAllColumns            = []string{"bao_id", "label"}
	bioassayOntologyColumnsWithoutDefault = []string{"bao_id", "label"}
	bioassayOntologyColumnsWithDefault    = []string{}
	bioassayOntologyPrimaryKeyColumns     = []string{"bao_id"}
	bioassayOntologyGeneratedColumns      = []string{}
)

type (
	// BioassayOntologySlice is an alias for a slice of pointers to BioassayOntology.
	// This should almost always be used instead of []BioassayOntology.
	BioassayOntologySlice []*BioassayOntology
	// BioassayOntologyHook is the signature for custom BioassayOntology hook methods
	BioassayOntologyHook func(context.Context, boil.ContextExecutor, *BioassayOntology) error

	bioassayOntologyQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	bioassayOntologyType                 = reflect.TypeOf(&BioassayOntology{})
	bioassayOntologyMapping              = queries.MakeStructMapping(bioassayOntologyType)
	bioassayOntologyPrimaryKeyMapping, _ = queries.BindMapping(bioassayOntologyType, bioassayOntologyMapping, bioassayOntologyPrimaryKeyColumns)
	bioassayOntologyInsertCacheMut       sync.RWMutex
	bioassayOntologyInsertCache          = make(map[string]insertCache)
	bioassayOntologyUpdateCacheMut       sync.RWMutex
	bioassayOntologyUpdateCache          = make(map[string]updateCache)
	bioassayOntologyUpsertCacheMut       sync.RWMutex
	bioassayOntologyUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Registered hook slices, one per hook point; see AddBioassayOntologyHook.
var bioassayOntologyAfterSelectHooks []BioassayOntologyHook

var bioassayOntologyBeforeInsertHooks []BioassayOntologyHook
var bioassayOntologyAfterInsertHooks []BioassayOntologyHook

var bioassayOntologyBeforeUpdateHooks []BioassayOntologyHook
var bioassayOntologyAfterUpdateHooks []BioassayOntologyHook

var bioassayOntologyBeforeDeleteHooks []BioassayOntologyHook
var bioassayOntologyAfterDeleteHooks []BioassayOntologyHook

var bioassayOntologyBeforeUpsertHooks []BioassayOntologyHook
var bioassayOntologyAfterUpsertHooks []BioassayOntologyHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *BioassayOntology) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// NOTE: all do*Hooks runners share this shape: a per-call opt-out via
	// boil.SkipHooks(ctx), then the registered hooks run in registration
	// order, stopping at the first error.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioassayOntologyAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *BioassayOntology) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioassayOntologyBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *BioassayOntology) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioassayOntologyAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *BioassayOntology) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioassayOntologyBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *BioassayOntology) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioassayOntologyAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *BioassayOntology) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioassayOntologyBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *BioassayOntology) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioassayOntologyAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *BioassayOntology) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioassayOntologyBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *BioassayOntology) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range bioassayOntologyAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddBioassayOntologyHook registers your hook function for all future operations.
+func AddBioassayOntologyHook(hookPoint boil.HookPoint, bioassayOntologyHook BioassayOntologyHook) { + switch hookPoint { + case boil.AfterSelectHook: + bioassayOntologyAfterSelectHooks = append(bioassayOntologyAfterSelectHooks, bioassayOntologyHook) + case boil.BeforeInsertHook: + bioassayOntologyBeforeInsertHooks = append(bioassayOntologyBeforeInsertHooks, bioassayOntologyHook) + case boil.AfterInsertHook: + bioassayOntologyAfterInsertHooks = append(bioassayOntologyAfterInsertHooks, bioassayOntologyHook) + case boil.BeforeUpdateHook: + bioassayOntologyBeforeUpdateHooks = append(bioassayOntologyBeforeUpdateHooks, bioassayOntologyHook) + case boil.AfterUpdateHook: + bioassayOntologyAfterUpdateHooks = append(bioassayOntologyAfterUpdateHooks, bioassayOntologyHook) + case boil.BeforeDeleteHook: + bioassayOntologyBeforeDeleteHooks = append(bioassayOntologyBeforeDeleteHooks, bioassayOntologyHook) + case boil.AfterDeleteHook: + bioassayOntologyAfterDeleteHooks = append(bioassayOntologyAfterDeleteHooks, bioassayOntologyHook) + case boil.BeforeUpsertHook: + bioassayOntologyBeforeUpsertHooks = append(bioassayOntologyBeforeUpsertHooks, bioassayOntologyHook) + case boil.AfterUpsertHook: + bioassayOntologyAfterUpsertHooks = append(bioassayOntologyAfterUpsertHooks, bioassayOntologyHook) + } +} + +// One returns a single bioassayOntology record from the query. +func (q bioassayOntologyQuery) One(ctx context.Context, exec boil.ContextExecutor) (*BioassayOntology, error) { + o := &BioassayOntology{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for bioassay_ontology") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all BioassayOntology records from the query. 
+func (q bioassayOntologyQuery) All(ctx context.Context, exec boil.ContextExecutor) (BioassayOntologySlice, error) { + var o []*BioassayOntology + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to BioassayOntology slice") + } + + if len(bioassayOntologyAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all BioassayOntology records in the query. +func (q bioassayOntologyQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count bioassay_ontology rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q bioassayOntologyQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if bioassay_ontology exists") + } + + return count > 0, nil +} + +// BaoEndpointActivities retrieves all the activity's Activities with an executor via bao_endpoint column. +func (o *BioassayOntology) BaoEndpointActivities(mods ...qm.QueryMod) activityQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"activities\".\"bao_endpoint\"=?", o.BaoID), + ) + + return Activities(queryMods...) +} + +// BaoFormatAssays retrieves all the assay's Assays with an executor via bao_format column. 
+func (o *BioassayOntology) BaoFormatAssays(mods ...qm.QueryMod) assayQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"assays\".\"bao_format\"=?", o.BaoID), + ) + + return Assays(queryMods...) +} + +// LoadBaoEndpointActivities allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (bioassayOntologyL) LoadBaoEndpointActivities(ctx context.Context, e boil.ContextExecutor, singular bool, maybeBioassayOntology interface{}, mods queries.Applicator) error { + var slice []*BioassayOntology + var object *BioassayOntology + + if singular { + object = maybeBioassayOntology.(*BioassayOntology) + } else { + slice = *maybeBioassayOntology.(*[]*BioassayOntology) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &bioassayOntologyR{} + } + args = append(args, object.BaoID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &bioassayOntologyR{} + } + + for _, a := range args { + if queries.Equal(a, obj.BaoID) { + continue Outer + } + } + + args = append(args, obj.BaoID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`activities`), + qm.WhereIn(`activities.bao_endpoint in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load activities") + } + + var resultSlice []*Activity + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice activities") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on activities") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activities") + } + + 
if len(activityAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.BaoEndpointActivities = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.BaoEndpointBioassayOntology = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.BaoID, foreign.BaoEndpoint) { + local.R.BaoEndpointActivities = append(local.R.BaoEndpointActivities, foreign) + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.BaoEndpointBioassayOntology = local + break + } + } + } + + return nil +} + +// LoadBaoFormatAssays allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (bioassayOntologyL) LoadBaoFormatAssays(ctx context.Context, e boil.ContextExecutor, singular bool, maybeBioassayOntology interface{}, mods queries.Applicator) error { + var slice []*BioassayOntology + var object *BioassayOntology + + if singular { + object = maybeBioassayOntology.(*BioassayOntology) + } else { + slice = *maybeBioassayOntology.(*[]*BioassayOntology) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &bioassayOntologyR{} + } + args = append(args, object.BaoID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &bioassayOntologyR{} + } + + for _, a := range args { + if queries.Equal(a, obj.BaoID) { + continue Outer + } + } + + args = append(args, obj.BaoID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assays`), + qm.WhereIn(`assays.bao_format in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load assays") + } + + var resultSlice []*Assay + if err 
= queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice assays") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on assays") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays") + } + + if len(assayAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.BaoFormatAssays = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.BaoFormatBioassayOntology = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.BaoID, foreign.BaoFormat) { + local.R.BaoFormatAssays = append(local.R.BaoFormatAssays, foreign) + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.BaoFormatBioassayOntology = local + break + } + } + } + + return nil +} + +// AddBaoEndpointActivities adds the given related objects to the existing relationships +// of the bioassay_ontology, optionally inserting them as new records. +// Appends related to o.R.BaoEndpointActivities. +// Sets related.R.BaoEndpointBioassayOntology appropriately. 
+func (o *BioassayOntology) AddBaoEndpointActivities(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Activity) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.BaoEndpoint, o.BaoID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"activities\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"bao_endpoint"}), + strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns), + ) + values := []interface{}{o.BaoID, rel.ActivityID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.BaoEndpoint, o.BaoID) + } + } + + if o.R == nil { + o.R = &bioassayOntologyR{ + BaoEndpointActivities: related, + } + } else { + o.R.BaoEndpointActivities = append(o.R.BaoEndpointActivities, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &activityR{ + BaoEndpointBioassayOntology: o, + } + } else { + rel.R.BaoEndpointBioassayOntology = o + } + } + return nil +} + +// SetBaoEndpointActivities removes all previously related items of the +// bioassay_ontology replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.BaoEndpointBioassayOntology's BaoEndpointActivities accordingly. +// Replaces o.R.BaoEndpointActivities with related. +// Sets related.R.BaoEndpointBioassayOntology's BaoEndpointActivities accordingly. +func (o *BioassayOntology) SetBaoEndpointActivities(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Activity) error { + query := "update \"activities\" set \"bao_endpoint\" = null where \"bao_endpoint\" = ?" 
+ values := []interface{}{o.BaoID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.BaoEndpointActivities { + queries.SetScanner(&rel.BaoEndpoint, nil) + if rel.R == nil { + continue + } + + rel.R.BaoEndpointBioassayOntology = nil + } + o.R.BaoEndpointActivities = nil + } + + return o.AddBaoEndpointActivities(ctx, exec, insert, related...) +} + +// RemoveBaoEndpointActivities relationships from objects passed in. +// Removes related items from R.BaoEndpointActivities (uses pointer comparison, removal does not keep order) +// Sets related.R.BaoEndpointBioassayOntology. +func (o *BioassayOntology) RemoveBaoEndpointActivities(ctx context.Context, exec boil.ContextExecutor, related ...*Activity) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.BaoEndpoint, nil) + if rel.R != nil { + rel.R.BaoEndpointBioassayOntology = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("bao_endpoint")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.BaoEndpointActivities { + if rel != ri { + continue + } + + ln := len(o.R.BaoEndpointActivities) + if ln > 1 && i < ln-1 { + o.R.BaoEndpointActivities[i] = o.R.BaoEndpointActivities[ln-1] + } + o.R.BaoEndpointActivities = o.R.BaoEndpointActivities[:ln-1] + break + } + } + + return nil +} + +// AddBaoFormatAssays adds the given related objects to the existing relationships +// of the bioassay_ontology, optionally inserting them as new records. +// Appends related to o.R.BaoFormatAssays. +// Sets related.R.BaoFormatBioassayOntology appropriately. 
+func (o *BioassayOntology) AddBaoFormatAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.BaoFormat, o.BaoID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"bao_format"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{o.BaoID, rel.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.BaoFormat, o.BaoID) + } + } + + if o.R == nil { + o.R = &bioassayOntologyR{ + BaoFormatAssays: related, + } + } else { + o.R.BaoFormatAssays = append(o.R.BaoFormatAssays, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &assayR{ + BaoFormatBioassayOntology: o, + } + } else { + rel.R.BaoFormatBioassayOntology = o + } + } + return nil +} + +// SetBaoFormatAssays removes all previously related items of the +// bioassay_ontology replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.BaoFormatBioassayOntology's BaoFormatAssays accordingly. +// Replaces o.R.BaoFormatAssays with related. +// Sets related.R.BaoFormatBioassayOntology's BaoFormatAssays accordingly. +func (o *BioassayOntology) SetBaoFormatAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error { + query := "update \"assays\" set \"bao_format\" = null where \"bao_format\" = ?" 
+ values := []interface{}{o.BaoID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.BaoFormatAssays { + queries.SetScanner(&rel.BaoFormat, nil) + if rel.R == nil { + continue + } + + rel.R.BaoFormatBioassayOntology = nil + } + o.R.BaoFormatAssays = nil + } + + return o.AddBaoFormatAssays(ctx, exec, insert, related...) +} + +// RemoveBaoFormatAssays relationships from objects passed in. +// Removes related items from R.BaoFormatAssays (uses pointer comparison, removal does not keep order) +// Sets related.R.BaoFormatBioassayOntology. +func (o *BioassayOntology) RemoveBaoFormatAssays(ctx context.Context, exec boil.ContextExecutor, related ...*Assay) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.BaoFormat, nil) + if rel.R != nil { + rel.R.BaoFormatBioassayOntology = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("bao_format")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.BaoFormatAssays { + if rel != ri { + continue + } + + ln := len(o.R.BaoFormatAssays) + if ln > 1 && i < ln-1 { + o.R.BaoFormatAssays[i] = o.R.BaoFormatAssays[ln-1] + } + o.R.BaoFormatAssays = o.R.BaoFormatAssays[:ln-1] + break + } + } + + return nil +} + +// BioassayOntologies retrieves all the records using an executor. +func BioassayOntologies(mods ...qm.QueryMod) bioassayOntologyQuery { + mods = append(mods, qm.From("\"bioassay_ontology\"")) + q := NewQuery(mods...) 
+ if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"bioassay_ontology\".*"}) + } + + return bioassayOntologyQuery{q} +} + +// FindBioassayOntology retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindBioassayOntology(ctx context.Context, exec boil.ContextExecutor, baoID string, selectCols ...string) (*BioassayOntology, error) { + bioassayOntologyObj := &BioassayOntology{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"bioassay_ontology\" where \"bao_id\"=?", sel, + ) + + q := queries.Raw(query, baoID) + + err := q.Bind(ctx, exec, bioassayOntologyObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from bioassay_ontology") + } + + if err = bioassayOntologyObj.doAfterSelectHooks(ctx, exec); err != nil { + return bioassayOntologyObj, err + } + + return bioassayOntologyObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *BioassayOntology) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no bioassay_ontology provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(bioassayOntologyColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + bioassayOntologyInsertCacheMut.RLock() + cache, cached := bioassayOntologyInsertCache[key] + bioassayOntologyInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + bioassayOntologyAllColumns, + bioassayOntologyColumnsWithDefault, + bioassayOntologyColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(bioassayOntologyType, bioassayOntologyMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(bioassayOntologyType, bioassayOntologyMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"bioassay_ontology\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"bioassay_ontology\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) 
+ } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into bioassay_ontology") + } + + if !cached { + bioassayOntologyInsertCacheMut.Lock() + bioassayOntologyInsertCache[key] = cache + bioassayOntologyInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the BioassayOntology. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *BioassayOntology) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + bioassayOntologyUpdateCacheMut.RLock() + cache, cached := bioassayOntologyUpdateCache[key] + bioassayOntologyUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + bioassayOntologyAllColumns, + bioassayOntologyPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update bioassay_ontology, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"bioassay_ontology\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, bioassayOntologyPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(bioassayOntologyType, bioassayOntologyMapping, append(wl, bioassayOntologyPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var 
result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update bioassay_ontology row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for bioassay_ontology") + } + + if !cached { + bioassayOntologyUpdateCacheMut.Lock() + bioassayOntologyUpdateCache[key] = cache + bioassayOntologyUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q bioassayOntologyQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for bioassay_ontology") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for bioassay_ontology") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o BioassayOntologySlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), bioassayOntologyPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"bioassay_ontology\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, bioassayOntologyPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in bioassayOntology slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all bioassayOntology") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *BioassayOntology) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no bioassay_ontology provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(bioassayOntologyColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + 
strmangle.PutBuffer(buf) + + bioassayOntologyUpsertCacheMut.RLock() + cache, cached := bioassayOntologyUpsertCache[key] + bioassayOntologyUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + bioassayOntologyAllColumns, + bioassayOntologyColumnsWithDefault, + bioassayOntologyColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + bioassayOntologyAllColumns, + bioassayOntologyPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert bioassay_ontology, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(bioassayOntologyPrimaryKeyColumns)) + copy(conflict, bioassayOntologyPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"bioassay_ontology\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(bioassayOntologyType, bioassayOntologyMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(bioassayOntologyType, bioassayOntologyMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert bioassay_ontology") + } + + if !cached { + bioassayOntologyUpsertCacheMut.Lock() + bioassayOntologyUpsertCache[key] = cache + bioassayOntologyUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single BioassayOntology record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *BioassayOntology) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no BioassayOntology provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), bioassayOntologyPrimaryKeyMapping) + sql := "DELETE FROM \"bioassay_ontology\" WHERE \"bao_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from bioassay_ontology") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for bioassay_ontology") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q bioassayOntologyQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no bioassayOntologyQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from bioassay_ontology") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for bioassay_ontology") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o BioassayOntologySlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(bioassayOntologyBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), bioassayOntologyPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"bioassay_ontology\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, bioassayOntologyPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from bioassayOntology slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for bioassay_ontology") + } + + if len(bioassayOntologyAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *BioassayOntology) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindBioassayOntology(ctx, exec, o.BaoID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *BioassayOntologySlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := BioassayOntologySlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), bioassayOntologyPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"bioassay_ontology\".* FROM \"bioassay_ontology\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, bioassayOntologyPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in BioassayOntologySlice") + } + + *o = slice + + return nil +} + +// BioassayOntologyExists checks if the BioassayOntology row exists. 
+func BioassayOntologyExists(ctx context.Context, exec boil.ContextExecutor, baoID string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"bioassay_ontology\" where \"bao_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, baoID) + } + row := exec.QueryRowContext(ctx, sql, baoID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if bioassay_ontology exists") + } + + return exists, nil +} diff --git a/models/biotherapeutic_components.go b/models/biotherapeutic_components.go new file mode 100644 index 0000000..168bb4e --- /dev/null +++ b/models/biotherapeutic_components.go @@ -0,0 +1,1241 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// BiotherapeuticComponent is an object representing the database table. 
+type BiotherapeuticComponent struct { + BiocompID int64 `boil:"biocomp_id" json:"biocomp_id" toml:"biocomp_id" yaml:"biocomp_id"` + Molregno int64 `boil:"molregno" json:"molregno" toml:"molregno" yaml:"molregno"` + ComponentID int64 `boil:"component_id" json:"component_id" toml:"component_id" yaml:"component_id"` + + R *biotherapeuticComponentR `boil:"-" json:"-" toml:"-" yaml:"-"` + L biotherapeuticComponentL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var BiotherapeuticComponentColumns = struct { + BiocompID string + Molregno string + ComponentID string +}{ + BiocompID: "biocomp_id", + Molregno: "molregno", + ComponentID: "component_id", +} + +var BiotherapeuticComponentTableColumns = struct { + BiocompID string + Molregno string + ComponentID string +}{ + BiocompID: "biotherapeutic_components.biocomp_id", + Molregno: "biotherapeutic_components.molregno", + ComponentID: "biotherapeutic_components.component_id", +} + +// Generated where + +var BiotherapeuticComponentWhere = struct { + BiocompID whereHelperint64 + Molregno whereHelperint64 + ComponentID whereHelperint64 +}{ + BiocompID: whereHelperint64{field: "\"biotherapeutic_components\".\"biocomp_id\""}, + Molregno: whereHelperint64{field: "\"biotherapeutic_components\".\"molregno\""}, + ComponentID: whereHelperint64{field: "\"biotherapeutic_components\".\"component_id\""}, +} + +// BiotherapeuticComponentRels is where relationship names are stored. +var BiotherapeuticComponentRels = struct { + MolregnoBiotherapeutic string + Component string +}{ + MolregnoBiotherapeutic: "MolregnoBiotherapeutic", + Component: "Component", +} + +// biotherapeuticComponentR is where relationships are stored. 
+type biotherapeuticComponentR struct { + MolregnoBiotherapeutic *Biotherapeutic `boil:"MolregnoBiotherapeutic" json:"MolregnoBiotherapeutic" toml:"MolregnoBiotherapeutic" yaml:"MolregnoBiotherapeutic"` + Component *BioComponentSequence `boil:"Component" json:"Component" toml:"Component" yaml:"Component"` +} + +// NewStruct creates a new relationship struct +func (*biotherapeuticComponentR) NewStruct() *biotherapeuticComponentR { + return &biotherapeuticComponentR{} +} + +func (r *biotherapeuticComponentR) GetMolregnoBiotherapeutic() *Biotherapeutic { + if r == nil { + return nil + } + return r.MolregnoBiotherapeutic +} + +func (r *biotherapeuticComponentR) GetComponent() *BioComponentSequence { + if r == nil { + return nil + } + return r.Component +} + +// biotherapeuticComponentL is where Load methods for each relationship are stored. +type biotherapeuticComponentL struct{} + +var ( + biotherapeuticComponentAllColumns = []string{"biocomp_id", "molregno", "component_id"} + biotherapeuticComponentColumnsWithoutDefault = []string{"biocomp_id", "molregno", "component_id"} + biotherapeuticComponentColumnsWithDefault = []string{} + biotherapeuticComponentPrimaryKeyColumns = []string{"biocomp_id"} + biotherapeuticComponentGeneratedColumns = []string{} +) + +type ( + // BiotherapeuticComponentSlice is an alias for a slice of pointers to BiotherapeuticComponent. + // This should almost always be used instead of []BiotherapeuticComponent. 
+ BiotherapeuticComponentSlice []*BiotherapeuticComponent + // BiotherapeuticComponentHook is the signature for custom BiotherapeuticComponent hook methods + BiotherapeuticComponentHook func(context.Context, boil.ContextExecutor, *BiotherapeuticComponent) error + + biotherapeuticComponentQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + biotherapeuticComponentType = reflect.TypeOf(&BiotherapeuticComponent{}) + biotherapeuticComponentMapping = queries.MakeStructMapping(biotherapeuticComponentType) + biotherapeuticComponentPrimaryKeyMapping, _ = queries.BindMapping(biotherapeuticComponentType, biotherapeuticComponentMapping, biotherapeuticComponentPrimaryKeyColumns) + biotherapeuticComponentInsertCacheMut sync.RWMutex + biotherapeuticComponentInsertCache = make(map[string]insertCache) + biotherapeuticComponentUpdateCacheMut sync.RWMutex + biotherapeuticComponentUpdateCache = make(map[string]updateCache) + biotherapeuticComponentUpsertCacheMut sync.RWMutex + biotherapeuticComponentUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var biotherapeuticComponentAfterSelectHooks []BiotherapeuticComponentHook + +var biotherapeuticComponentBeforeInsertHooks []BiotherapeuticComponentHook +var biotherapeuticComponentAfterInsertHooks []BiotherapeuticComponentHook + +var biotherapeuticComponentBeforeUpdateHooks []BiotherapeuticComponentHook +var biotherapeuticComponentAfterUpdateHooks []BiotherapeuticComponentHook + +var biotherapeuticComponentBeforeDeleteHooks []BiotherapeuticComponentHook +var biotherapeuticComponentAfterDeleteHooks []BiotherapeuticComponentHook + +var biotherapeuticComponentBeforeUpsertHooks []BiotherapeuticComponentHook +var biotherapeuticComponentAfterUpsertHooks []BiotherapeuticComponentHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *BiotherapeuticComponent) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range biotherapeuticComponentAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *BiotherapeuticComponent) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range biotherapeuticComponentBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. 
+func (o *BiotherapeuticComponent) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range biotherapeuticComponentAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *BiotherapeuticComponent) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range biotherapeuticComponentBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *BiotherapeuticComponent) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range biotherapeuticComponentAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *BiotherapeuticComponent) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range biotherapeuticComponentBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *BiotherapeuticComponent) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range biotherapeuticComponentAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. 
+func (o *BiotherapeuticComponent) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range biotherapeuticComponentBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *BiotherapeuticComponent) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range biotherapeuticComponentAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddBiotherapeuticComponentHook registers your hook function for all future operations. +func AddBiotherapeuticComponentHook(hookPoint boil.HookPoint, biotherapeuticComponentHook BiotherapeuticComponentHook) { + switch hookPoint { + case boil.AfterSelectHook: + biotherapeuticComponentAfterSelectHooks = append(biotherapeuticComponentAfterSelectHooks, biotherapeuticComponentHook) + case boil.BeforeInsertHook: + biotherapeuticComponentBeforeInsertHooks = append(biotherapeuticComponentBeforeInsertHooks, biotherapeuticComponentHook) + case boil.AfterInsertHook: + biotherapeuticComponentAfterInsertHooks = append(biotherapeuticComponentAfterInsertHooks, biotherapeuticComponentHook) + case boil.BeforeUpdateHook: + biotherapeuticComponentBeforeUpdateHooks = append(biotherapeuticComponentBeforeUpdateHooks, biotherapeuticComponentHook) + case boil.AfterUpdateHook: + biotherapeuticComponentAfterUpdateHooks = append(biotherapeuticComponentAfterUpdateHooks, biotherapeuticComponentHook) + case boil.BeforeDeleteHook: + biotherapeuticComponentBeforeDeleteHooks = append(biotherapeuticComponentBeforeDeleteHooks, biotherapeuticComponentHook) + case boil.AfterDeleteHook: + biotherapeuticComponentAfterDeleteHooks = append(biotherapeuticComponentAfterDeleteHooks, biotherapeuticComponentHook) + case 
boil.BeforeUpsertHook: + biotherapeuticComponentBeforeUpsertHooks = append(biotherapeuticComponentBeforeUpsertHooks, biotherapeuticComponentHook) + case boil.AfterUpsertHook: + biotherapeuticComponentAfterUpsertHooks = append(biotherapeuticComponentAfterUpsertHooks, biotherapeuticComponentHook) + } +} + +// One returns a single biotherapeuticComponent record from the query. +func (q biotherapeuticComponentQuery) One(ctx context.Context, exec boil.ContextExecutor) (*BiotherapeuticComponent, error) { + o := &BiotherapeuticComponent{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for biotherapeutic_components") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all BiotherapeuticComponent records from the query. +func (q biotherapeuticComponentQuery) All(ctx context.Context, exec boil.ContextExecutor) (BiotherapeuticComponentSlice, error) { + var o []*BiotherapeuticComponent + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to BiotherapeuticComponent slice") + } + + if len(biotherapeuticComponentAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all BiotherapeuticComponent records in the query. 
+func (q biotherapeuticComponentQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count biotherapeutic_components rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q biotherapeuticComponentQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if biotherapeutic_components exists") + } + + return count > 0, nil +} + +// MolregnoBiotherapeutic pointed to by the foreign key. +func (o *BiotherapeuticComponent) MolregnoBiotherapeutic(mods ...qm.QueryMod) biotherapeuticQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"molregno\" = ?", o.Molregno), + } + + queryMods = append(queryMods, mods...) + + return Biotherapeutics(queryMods...) +} + +// Component pointed to by the foreign key. +func (o *BiotherapeuticComponent) Component(mods ...qm.QueryMod) bioComponentSequenceQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"component_id\" = ?", o.ComponentID), + } + + queryMods = append(queryMods, mods...) + + return BioComponentSequences(queryMods...) +} + +// LoadMolregnoBiotherapeutic allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (biotherapeuticComponentL) LoadMolregnoBiotherapeutic(ctx context.Context, e boil.ContextExecutor, singular bool, maybeBiotherapeuticComponent interface{}, mods queries.Applicator) error { + var slice []*BiotherapeuticComponent + var object *BiotherapeuticComponent + + if singular { + object = maybeBiotherapeuticComponent.(*BiotherapeuticComponent) + } else { + slice = *maybeBiotherapeuticComponent.(*[]*BiotherapeuticComponent) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &biotherapeuticComponentR{} + } + args = append(args, object.Molregno) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &biotherapeuticComponentR{} + } + + for _, a := range args { + if a == obj.Molregno { + continue Outer + } + } + + args = append(args, obj.Molregno) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`biotherapeutics`), + qm.WhereIn(`biotherapeutics.molregno in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Biotherapeutic") + } + + var resultSlice []*Biotherapeutic + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Biotherapeutic") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for biotherapeutics") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for biotherapeutics") + } + + if len(biotherapeuticComponentAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.MolregnoBiotherapeutic = foreign + if foreign.R == nil { + foreign.R = 
&biotherapeuticR{} + } + foreign.R.MolregnoBiotherapeuticComponents = append(foreign.R.MolregnoBiotherapeuticComponents, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Molregno == foreign.Molregno { + local.R.MolregnoBiotherapeutic = foreign + if foreign.R == nil { + foreign.R = &biotherapeuticR{} + } + foreign.R.MolregnoBiotherapeuticComponents = append(foreign.R.MolregnoBiotherapeuticComponents, local) + break + } + } + } + + return nil +} + +// LoadComponent allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (biotherapeuticComponentL) LoadComponent(ctx context.Context, e boil.ContextExecutor, singular bool, maybeBiotherapeuticComponent interface{}, mods queries.Applicator) error { + var slice []*BiotherapeuticComponent + var object *BiotherapeuticComponent + + if singular { + object = maybeBiotherapeuticComponent.(*BiotherapeuticComponent) + } else { + slice = *maybeBiotherapeuticComponent.(*[]*BiotherapeuticComponent) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &biotherapeuticComponentR{} + } + args = append(args, object.ComponentID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &biotherapeuticComponentR{} + } + + for _, a := range args { + if a == obj.ComponentID { + continue Outer + } + } + + args = append(args, obj.ComponentID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`bio_component_sequences`), + qm.WhereIn(`bio_component_sequences.component_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load BioComponentSequence") + } + + var resultSlice []*BioComponentSequence + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager 
loaded slice BioComponentSequence") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for bio_component_sequences") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for bio_component_sequences") + } + + if len(biotherapeuticComponentAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Component = foreign + if foreign.R == nil { + foreign.R = &bioComponentSequenceR{} + } + foreign.R.ComponentBiotherapeuticComponents = append(foreign.R.ComponentBiotherapeuticComponents, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ComponentID == foreign.ComponentID { + local.R.Component = foreign + if foreign.R == nil { + foreign.R = &bioComponentSequenceR{} + } + foreign.R.ComponentBiotherapeuticComponents = append(foreign.R.ComponentBiotherapeuticComponents, local) + break + } + } + } + + return nil +} + +// SetMolregnoBiotherapeutic of the biotherapeuticComponent to the related item. +// Sets o.R.MolregnoBiotherapeutic to related. +// Adds o to related.R.MolregnoBiotherapeuticComponents. 
+func (o *BiotherapeuticComponent) SetMolregnoBiotherapeutic(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Biotherapeutic) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"biotherapeutic_components\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, biotherapeuticComponentPrimaryKeyColumns), + ) + values := []interface{}{related.Molregno, o.BiocompID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.Molregno = related.Molregno + if o.R == nil { + o.R = &biotherapeuticComponentR{ + MolregnoBiotherapeutic: related, + } + } else { + o.R.MolregnoBiotherapeutic = related + } + + if related.R == nil { + related.R = &biotherapeuticR{ + MolregnoBiotherapeuticComponents: BiotherapeuticComponentSlice{o}, + } + } else { + related.R.MolregnoBiotherapeuticComponents = append(related.R.MolregnoBiotherapeuticComponents, o) + } + + return nil +} + +// SetComponent of the biotherapeuticComponent to the related item. +// Sets o.R.Component to related. +// Adds o to related.R.ComponentBiotherapeuticComponents. 
+func (o *BiotherapeuticComponent) SetComponent(ctx context.Context, exec boil.ContextExecutor, insert bool, related *BioComponentSequence) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"biotherapeutic_components\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"component_id"}), + strmangle.WhereClause("\"", "\"", 0, biotherapeuticComponentPrimaryKeyColumns), + ) + values := []interface{}{related.ComponentID, o.BiocompID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.ComponentID = related.ComponentID + if o.R == nil { + o.R = &biotherapeuticComponentR{ + Component: related, + } + } else { + o.R.Component = related + } + + if related.R == nil { + related.R = &bioComponentSequenceR{ + ComponentBiotherapeuticComponents: BiotherapeuticComponentSlice{o}, + } + } else { + related.R.ComponentBiotherapeuticComponents = append(related.R.ComponentBiotherapeuticComponents, o) + } + + return nil +} + +// BiotherapeuticComponents retrieves all the records using an executor. +func BiotherapeuticComponents(mods ...qm.QueryMod) biotherapeuticComponentQuery { + mods = append(mods, qm.From("\"biotherapeutic_components\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"biotherapeutic_components\".*"}) + } + + return biotherapeuticComponentQuery{q} +} + +// FindBiotherapeuticComponent retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindBiotherapeuticComponent(ctx context.Context, exec boil.ContextExecutor, biocompID int64, selectCols ...string) (*BiotherapeuticComponent, error) { + biotherapeuticComponentObj := &BiotherapeuticComponent{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"biotherapeutic_components\" where \"biocomp_id\"=?", sel, + ) + + q := queries.Raw(query, biocompID) + + err := q.Bind(ctx, exec, biotherapeuticComponentObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from biotherapeutic_components") + } + + if err = biotherapeuticComponentObj.doAfterSelectHooks(ctx, exec); err != nil { + return biotherapeuticComponentObj, err + } + + return biotherapeuticComponentObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *BiotherapeuticComponent) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no biotherapeutic_components provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(biotherapeuticComponentColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + biotherapeuticComponentInsertCacheMut.RLock() + cache, cached := biotherapeuticComponentInsertCache[key] + biotherapeuticComponentInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + biotherapeuticComponentAllColumns, + biotherapeuticComponentColumnsWithDefault, + biotherapeuticComponentColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(biotherapeuticComponentType, biotherapeuticComponentMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(biotherapeuticComponentType, biotherapeuticComponentMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"biotherapeutic_components\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"biotherapeutic_components\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = 
exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into biotherapeutic_components") + } + + if !cached { + biotherapeuticComponentInsertCacheMut.Lock() + biotherapeuticComponentInsertCache[key] = cache + biotherapeuticComponentInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the BiotherapeuticComponent. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *BiotherapeuticComponent) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + biotherapeuticComponentUpdateCacheMut.RLock() + cache, cached := biotherapeuticComponentUpdateCache[key] + biotherapeuticComponentUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + biotherapeuticComponentAllColumns, + biotherapeuticComponentPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update biotherapeutic_components, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"biotherapeutic_components\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, biotherapeuticComponentPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(biotherapeuticComponentType, biotherapeuticComponentMapping, append(wl, biotherapeuticComponentPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := 
queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update biotherapeutic_components row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for biotherapeutic_components") + } + + if !cached { + biotherapeuticComponentUpdateCacheMut.Lock() + biotherapeuticComponentUpdateCache[key] = cache + biotherapeuticComponentUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q biotherapeuticComponentQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for biotherapeutic_components") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for biotherapeutic_components") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. 
+func (o BiotherapeuticComponentSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), biotherapeuticComponentPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"biotherapeutic_components\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, biotherapeuticComponentPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in biotherapeuticComponent slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all biotherapeuticComponent") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. 
func (o *BiotherapeuticComponent) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no biotherapeutic_components provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(biotherapeuticComponentColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	// The key encodes every input that changes the generated SQL, so a
	// cached query/mapping can be reused for identical call shapes.
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	biotherapeuticComponentUpsertCacheMut.RLock()
	cache, cached := biotherapeuticComponentUpsertCache[key]
	biotherapeuticComponentUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			biotherapeuticComponentAllColumns,
			biotherapeuticComponentColumnsWithDefault,
			biotherapeuticComponentColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			biotherapeuticComponentAllColumns,
			biotherapeuticComponentPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert biotherapeutic_components, could not build update column list")
		}

		// Default conflict target is the primary key when none was supplied.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(biotherapeuticComponentPrimaryKeyColumns))
			copy(conflict, biotherapeuticComponentPrimaryKeyColumns)
		}

		// NOTE(review): the SQLite query builder is used here even though the
		// ErrNoRows comment below mentions Postgres — that comment is generator
		// boilerplate; the "no row returned when nothing changed" handling
		// applies the same way. Confirm against the configured dialect.
		cache.query = buildUpsertQuerySQLite(dialect, "\"biotherapeutic_components\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(biotherapeuticComponentType, biotherapeuticComponentMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(biotherapeuticComponentType, biotherapeuticComponentMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert biotherapeutic_components")
	}

	if !cached {
		biotherapeuticComponentUpsertCacheMut.Lock()
		biotherapeuticComponentUpsertCache[key] = cache
		biotherapeuticComponentUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single BiotherapeuticComponent record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *BiotherapeuticComponent) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no BiotherapeuticComponent provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	// Bind only the primary key (biocomp_id); `?` is the SQLite-style placeholder.
	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), biotherapeuticComponentPrimaryKeyMapping)
	sql := "DELETE FROM \"biotherapeutic_components\" WHERE \"biocomp_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from biotherapeutic_components")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for biotherapeutic_components")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q biotherapeuticComponentQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no biotherapeuticComponentQuery provided for delete all")
	}

	// Rewrite the built query into a DELETE before executing.
	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from biotherapeutic_components")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for biotherapeutic_components")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o BiotherapeuticComponentSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Run before-delete hooks for every object only when any are registered.
	if len(biotherapeuticComponentBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), biotherapeuticComponentPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// Single DELETE with one repeated primary-key clause per slice element.
	sql := "DELETE FROM \"biotherapeutic_components\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, biotherapeuticComponentPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from biotherapeuticComponent slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for biotherapeutic_components")
	}

	if len(biotherapeuticComponentAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *BiotherapeuticComponent) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindBiotherapeuticComponent(ctx, exec, o.BiocompID)
	if err != nil {
		return err
	}

	// Overwrite the receiver's fields in place with the fresh row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *BiotherapeuticComponentSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := BiotherapeuticComponentSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), biotherapeuticComponentPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// Refetch every row matching the collected primary keys in one query.
	// NOTE(review): result order comes from the database, not the original
	// slice order — callers relying on ordering should re-sort.
	sql := "SELECT \"biotherapeutic_components\".* FROM \"biotherapeutic_components\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, biotherapeuticComponentPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in BiotherapeuticComponentSlice")
	}

	*o = slice

	return nil
}

// BiotherapeuticComponentExists checks if the BiotherapeuticComponent row exists.
func BiotherapeuticComponentExists(ctx context.Context, exec boil.ContextExecutor, biocompID int64) (bool, error) {
	var exists bool
	// LIMIT 1 inside EXISTS keeps the probe cheap regardless of table size.
	sql := "select exists(select 1 from \"biotherapeutic_components\" where \"biocomp_id\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, biocompID)
	}
	row := exec.QueryRowContext(ctx, sql, biocompID)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if biotherapeutic_components exists")
	}

	return exists, nil
}
diff --git a/models/biotherapeutics.go b/models/biotherapeutics.go
new file mode 100644
index 0000000..cf13642
--- /dev/null
+++ b/models/biotherapeutics.go
@@ -0,0 +1,1245 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// Biotherapeutic is an object representing the database table.
// Nullable columns use null.String so SQL NULL round-trips without loss.
type Biotherapeutic struct {
	Molregno     int64       `boil:"molregno" json:"molregno" toml:"molregno" yaml:"molregno"`
	Description  null.String `boil:"description" json:"description,omitempty" toml:"description" yaml:"description,omitempty"`
	HelmNotation null.String `boil:"helm_notation" json:"helm_notation,omitempty" toml:"helm_notation" yaml:"helm_notation,omitempty"`

	// R holds eagerly-loaded relationships; L holds their Load methods.
	R *biotherapeuticR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L biotherapeuticL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// BiotherapeuticColumns maps struct fields to bare column names.
var BiotherapeuticColumns = struct {
	Molregno     string
	Description  string
	HelmNotation string
}{
	Molregno:     "molregno",
	Description:  "description",
	HelmNotation: "helm_notation",
}

// BiotherapeuticTableColumns maps struct fields to table-qualified column names.
var BiotherapeuticTableColumns = struct {
	Molregno     string
	Description  string
	HelmNotation string
}{
	Molregno:     "biotherapeutics.molregno",
	Description:  "biotherapeutics.description",
	HelmNotation: "biotherapeutics.helm_notation",
}

// Generated where

var BiotherapeuticWhere = struct {
	Molregno     whereHelperint64
	Description  whereHelpernull_String
	HelmNotation whereHelpernull_String
}{
	Molregno:     whereHelperint64{field: "\"biotherapeutics\".\"molregno\""},
	Description:  whereHelpernull_String{field: "\"biotherapeutics\".\"description\""},
	HelmNotation: whereHelpernull_String{field: "\"biotherapeutics\".\"helm_notation\""},
}

// BiotherapeuticRels is where relationship names are stored.
var BiotherapeuticRels = struct {
	MolregnoMoleculeDictionary       string
	MolregnoBiotherapeuticComponents string
}{
	MolregnoMoleculeDictionary:       "MolregnoMoleculeDictionary",
	MolregnoBiotherapeuticComponents: "MolregnoBiotherapeuticComponents",
}

// biotherapeuticR is where relationships are stored.
type biotherapeuticR struct {
	MolregnoMoleculeDictionary       *MoleculeDictionary          `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"`
	MolregnoBiotherapeuticComponents BiotherapeuticComponentSlice `boil:"MolregnoBiotherapeuticComponents" json:"MolregnoBiotherapeuticComponents" toml:"MolregnoBiotherapeuticComponents" yaml:"MolregnoBiotherapeuticComponents"`
}

// NewStruct creates a new relationship struct
func (*biotherapeuticR) NewStruct() *biotherapeuticR {
	return &biotherapeuticR{}
}

// GetMolregnoMoleculeDictionary is a nil-safe accessor for the loaded relation.
func (r *biotherapeuticR) GetMolregnoMoleculeDictionary() *MoleculeDictionary {
	if r == nil {
		return nil
	}
	return r.MolregnoMoleculeDictionary
}

// GetMolregnoBiotherapeuticComponents is a nil-safe accessor for the loaded relation.
func (r *biotherapeuticR) GetMolregnoBiotherapeuticComponents() BiotherapeuticComponentSlice {
	if r == nil {
		return nil
	}
	return r.MolregnoBiotherapeuticComponents
}

// biotherapeuticL is where Load methods for each relationship are stored.
type biotherapeuticL struct{}

var (
	biotherapeuticAllColumns            = []string{"molregno", "description", "helm_notation"}
	biotherapeuticColumnsWithoutDefault = []string{"molregno"}
	biotherapeuticColumnsWithDefault    = []string{"description", "helm_notation"}
	biotherapeuticPrimaryKeyColumns     = []string{"molregno"}
	biotherapeuticGeneratedColumns      = []string{}
)

type (
	// BiotherapeuticSlice is an alias for a slice of pointers to Biotherapeutic.
	// This should almost always be used instead of []Biotherapeutic.
	BiotherapeuticSlice []*Biotherapeutic
	// BiotherapeuticHook is the signature for custom Biotherapeutic hook methods
	BiotherapeuticHook func(context.Context, boil.ContextExecutor, *Biotherapeutic) error

	biotherapeuticQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	biotherapeuticType                 = reflect.TypeOf(&Biotherapeutic{})
	biotherapeuticMapping              = queries.MakeStructMapping(biotherapeuticType)
	biotherapeuticPrimaryKeyMapping, _ = queries.BindMapping(biotherapeuticType, biotherapeuticMapping, biotherapeuticPrimaryKeyColumns)
	biotherapeuticInsertCacheMut       sync.RWMutex
	biotherapeuticInsertCache          = make(map[string]insertCache)
	biotherapeuticUpdateCacheMut       sync.RWMutex
	biotherapeuticUpdateCache          = make(map[string]updateCache)
	biotherapeuticUpsertCacheMut       sync.RWMutex
	biotherapeuticUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Package-level hook registries; AddBiotherapeuticHook appends to these.
var biotherapeuticAfterSelectHooks []BiotherapeuticHook

var biotherapeuticBeforeInsertHooks []BiotherapeuticHook
var biotherapeuticAfterInsertHooks []BiotherapeuticHook

var biotherapeuticBeforeUpdateHooks []BiotherapeuticHook
var biotherapeuticAfterUpdateHooks []BiotherapeuticHook

var biotherapeuticBeforeDeleteHooks []BiotherapeuticHook
var biotherapeuticAfterDeleteHooks []BiotherapeuticHook

var biotherapeuticBeforeUpsertHooks []BiotherapeuticHook
var biotherapeuticAfterUpsertHooks []BiotherapeuticHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *Biotherapeutic) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be globally disabled per-context via boil.SkipHooks.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range biotherapeuticAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *Biotherapeutic) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range biotherapeuticBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *Biotherapeutic) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range biotherapeuticAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *Biotherapeutic) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range biotherapeuticBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *Biotherapeutic) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range biotherapeuticAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *Biotherapeutic) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range biotherapeuticBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *Biotherapeutic) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range biotherapeuticAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *Biotherapeutic) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range biotherapeuticBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *Biotherapeutic) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range biotherapeuticAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddBiotherapeuticHook registers your hook function for all future operations.
func AddBiotherapeuticHook(hookPoint boil.HookPoint, biotherapeuticHook BiotherapeuticHook) {
	// NOTE(review): registration appends to package-level slices without a
	// lock — register hooks at startup, before concurrent use.
	switch hookPoint {
	case boil.AfterSelectHook:
		biotherapeuticAfterSelectHooks = append(biotherapeuticAfterSelectHooks, biotherapeuticHook)
	case boil.BeforeInsertHook:
		biotherapeuticBeforeInsertHooks = append(biotherapeuticBeforeInsertHooks, biotherapeuticHook)
	case boil.AfterInsertHook:
		biotherapeuticAfterInsertHooks = append(biotherapeuticAfterInsertHooks, biotherapeuticHook)
	case boil.BeforeUpdateHook:
		biotherapeuticBeforeUpdateHooks = append(biotherapeuticBeforeUpdateHooks, biotherapeuticHook)
	case boil.AfterUpdateHook:
		biotherapeuticAfterUpdateHooks = append(biotherapeuticAfterUpdateHooks, biotherapeuticHook)
	case boil.BeforeDeleteHook:
		biotherapeuticBeforeDeleteHooks = append(biotherapeuticBeforeDeleteHooks, biotherapeuticHook)
	case boil.AfterDeleteHook:
		biotherapeuticAfterDeleteHooks = append(biotherapeuticAfterDeleteHooks, biotherapeuticHook)
	case boil.BeforeUpsertHook:
		biotherapeuticBeforeUpsertHooks = append(biotherapeuticBeforeUpsertHooks, biotherapeuticHook)
	case boil.AfterUpsertHook:
		biotherapeuticAfterUpsertHooks = append(biotherapeuticAfterUpsertHooks, biotherapeuticHook)
	}
}

// One returns a single biotherapeutic record from the query.
func (q biotherapeuticQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Biotherapeutic, error) {
	o := &Biotherapeutic{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		// Surface sql.ErrNoRows unwrapped so callers can compare directly.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for biotherapeutics")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all Biotherapeutic records from the query.
func (q biotherapeuticQuery) All(ctx context.Context, exec boil.ContextExecutor) (BiotherapeuticSlice, error) {
	var o []*Biotherapeutic

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to Biotherapeutic slice")
	}

	if len(biotherapeuticAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all Biotherapeutic records in the query.
func (q biotherapeuticQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Replace the select list with COUNT(*) before executing.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count biotherapeutics rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q biotherapeuticQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	// COUNT with LIMIT 1 — stops at the first matching row.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if biotherapeutics exists")
	}

	return count > 0, nil
}

// MolregnoMoleculeDictionary pointed to by the foreign key.
func (o *Biotherapeutic) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"molregno\" = ?", o.Molregno),
	}

	queryMods = append(queryMods, mods...)

	return MoleculeDictionaries(queryMods...)
}

// MolregnoBiotherapeuticComponents retrieves all the biotherapeutic_component's BiotherapeuticComponents with an executor via molregno column.
func (o *Biotherapeutic) MolregnoBiotherapeuticComponents(mods ...qm.QueryMod) biotherapeuticComponentQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"biotherapeutic_components\".\"molregno\"=?", o.Molregno),
	)

	return BiotherapeuticComponents(queryMods...)
}

// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (biotherapeuticL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeBiotherapeutic interface{}, mods queries.Applicator) error {
	var slice []*Biotherapeutic
	var object *Biotherapeutic

	// maybeBiotherapeutic is either a single object or a slice of objects,
	// selected by the singular flag.
	if singular {
		object = maybeBiotherapeutic.(*Biotherapeutic)
	} else {
		slice = *maybeBiotherapeutic.(*[]*Biotherapeutic)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &biotherapeuticR{}
		}
		args = append(args, object.Molregno)

	} else {
		// Deduplicate foreign-key values so each key is queried once.
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &biotherapeuticR{}
			}

			for _, a := range args {
				if a == obj.Molregno {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_dictionary`),
		qm.WhereIn(`molecule_dictionary.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load MoleculeDictionary")
	}

	var resultSlice []*MoleculeDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary")
	}

	if len(biotherapeuticAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.MolregnoMoleculeDictionary = foreign
		if foreign.R == nil {
			foreign.R = &moleculeDictionaryR{}
		}
		foreign.R.MolregnoBiotherapeutic = object
		return nil
	}

	// Wire each local object to its matching foreign row, and back-link
	// the foreign row's R struct for the reverse relation.
	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.Molregno == foreign.Molregno {
				local.R.MolregnoMoleculeDictionary = foreign
				if foreign.R == nil {
					foreign.R = &moleculeDictionaryR{}
				}
				foreign.R.MolregnoBiotherapeutic = local
				break
			}
		}
	}

	return nil
}

// LoadMolregnoBiotherapeuticComponents allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (biotherapeuticL) LoadMolregnoBiotherapeuticComponents(ctx context.Context, e boil.ContextExecutor, singular bool, maybeBiotherapeutic interface{}, mods queries.Applicator) error {
	var slice []*Biotherapeutic
	var object *Biotherapeutic

	if singular {
		object = maybeBiotherapeutic.(*Biotherapeutic)
	} else {
		slice = *maybeBiotherapeutic.(*[]*Biotherapeutic)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &biotherapeuticR{}
		}
		args = append(args, object.Molregno)
	} else {
		// Deduplicate foreign-key values so each key is queried once.
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &biotherapeuticR{}
			}

			for _, a := range args {
				if a == obj.Molregno {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`biotherapeutic_components`),
		qm.WhereIn(`biotherapeutic_components.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load biotherapeutic_components")
	}

	var resultSlice []*BiotherapeuticComponent
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice biotherapeutic_components")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on biotherapeutic_components")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for biotherapeutic_components")
	}

	if len(biotherapeuticComponentAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.MolregnoBiotherapeuticComponents = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &biotherapeuticComponentR{}
			}
			foreign.R.MolregnoBiotherapeutic = object
		}
		return nil
	}

	// Distribute each foreign row to its owning local object and back-link it.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.Molregno == foreign.Molregno {
				local.R.MolregnoBiotherapeuticComponents = append(local.R.MolregnoBiotherapeuticComponents, foreign)
				if foreign.R == nil {
					foreign.R = &biotherapeuticComponentR{}
				}
				foreign.R.MolregnoBiotherapeutic = local
				break
			}
		}
	}

	return nil
}

// SetMolregnoMoleculeDictionary of the biotherapeutic to the related item.
// Sets o.R.MolregnoMoleculeDictionary to related.
// Adds o to related.R.MolregnoBiotherapeutic.
func (o *Biotherapeutic) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"biotherapeutics\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}),
		strmangle.WhereClause("\"", "\"", 0, biotherapeuticPrimaryKeyColumns),
	)
	// values: new FK value first, then the primary key for the WHERE clause.
	values := []interface{}{related.Molregno, o.Molregno}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the database change on the in-memory structs, both directions.
	o.Molregno = related.Molregno
	if o.R == nil {
		o.R = &biotherapeuticR{
			MolregnoMoleculeDictionary: related,
		}
	} else {
		o.R.MolregnoMoleculeDictionary = related
	}

	if related.R == nil {
		related.R = &moleculeDictionaryR{
			MolregnoBiotherapeutic: o,
		}
	} else {
		related.R.MolregnoBiotherapeutic = o
	}

	return nil
}

// AddMolregnoBiotherapeuticComponents adds the given related objects to the existing relationships
// of the biotherapeutic, optionally inserting them as new records.
// Appends related to o.R.MolregnoBiotherapeuticComponents.
// Sets related.R.MolregnoBiotherapeutic appropriately.
func (o *Biotherapeutic) AddMolregnoBiotherapeuticComponents(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*BiotherapeuticComponent) error {
	var err error
	for _, rel := range related {
		if insert {
			// New row: set the FK before inserting.
			rel.Molregno = o.Molregno
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: repoint its FK with an UPDATE keyed on its PK.
			updateQuery := fmt.Sprintf(
				"UPDATE \"biotherapeutic_components\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}),
				strmangle.WhereClause("\"", "\"", 0, biotherapeuticComponentPrimaryKeyColumns),
			)
			values := []interface{}{o.Molregno, rel.BiocompID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.Molregno = o.Molregno
		}
	}

	if o.R == nil {
		o.R = &biotherapeuticR{
			MolregnoBiotherapeuticComponents: related,
		}
	} else {
		o.R.MolregnoBiotherapeuticComponents = append(o.R.MolregnoBiotherapeuticComponents, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &biotherapeuticComponentR{
				MolregnoBiotherapeutic: o,
			}
		} else {
			rel.R.MolregnoBiotherapeutic = o
		}
	}
	return nil
}

// Biotherapeutics retrieves all the records using an executor.
func Biotherapeutics(mods ...qm.QueryMod) biotherapeuticQuery {
	mods = append(mods, qm.From("\"biotherapeutics\""))
	q := NewQuery(mods...)
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"biotherapeutics\".*"})
	}

	return biotherapeuticQuery{q}
}

// FindBiotherapeutic retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindBiotherapeutic(ctx context.Context, exec boil.ContextExecutor, molregno int64, selectCols ...string) (*Biotherapeutic, error) {
	biotherapeuticObj := &Biotherapeutic{}

	// Quote any requested columns with the dialect's identifier quotes.
	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"biotherapeutics\" where \"molregno\"=?", sel,
	)

	q := queries.Raw(query, molregno)

	err := q.Bind(ctx, exec, biotherapeuticObj)
	if err != nil {
		// Surface sql.ErrNoRows unwrapped so callers can compare directly.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from biotherapeutics")
	}

	if err = biotherapeuticObj.doAfterSelectHooks(ctx, exec); err != nil {
		return biotherapeuticObj, err
	}

	return biotherapeuticObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *Biotherapeutic) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no biotherapeutics provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	// Columns with DB defaults that the caller set to non-zero values must be
	// included in the insert list; they also feed the statement-cache key.
	nzDefaults := queries.NonZeroDefaultSet(biotherapeuticColumnsWithDefault, o)

	key := makeCacheKey(columns, nzDefaults)
	biotherapeuticInsertCacheMut.RLock()
	cache, cached := biotherapeuticInsertCache[key]
	biotherapeuticInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			biotherapeuticAllColumns,
			biotherapeuticColumnsWithDefault,
			biotherapeuticColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(biotherapeuticType, biotherapeuticMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(biotherapeuticType, biotherapeuticMapping, returnColumns)
		if err != nil {
			return err
		}
		// %%s escapes survive this Sprintf and are filled below with the
		// OUTPUT/RETURNING fragments.
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"biotherapeutics\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"biotherapeutics\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	// With a RETURNING clause the generated defaults are scanned straight back
	// into the struct; otherwise a plain exec suffices.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into biotherapeutics")
	}

	if !cached {
		biotherapeuticInsertCacheMut.Lock()
		biotherapeuticInsertCache[key] = cache
		biotherapeuticInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the Biotherapeutic.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *Biotherapeutic) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	biotherapeuticUpdateCacheMut.RLock()
	cache, cached := biotherapeuticUpdateCache[key]
	biotherapeuticUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			biotherapeuticAllColumns,
			biotherapeuticPrimaryKeyColumns,
		)

		// created_at is never updated implicitly; only an explicit whitelist
		// may include it.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update biotherapeutics, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"biotherapeutics\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, biotherapeuticPrimaryKeyColumns),
		)
		// Bind SET values first, then the primary key for the WHERE clause.
		cache.valueMapping, err = queries.BindMapping(biotherapeuticType, biotherapeuticMapping, append(wl, biotherapeuticPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update biotherapeutics row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for biotherapeutics")
	}

	if !cached {
		biotherapeuticUpdateCacheMut.Lock()
		biotherapeuticUpdateCache[key] = cache
		biotherapeuticUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q biotherapeuticQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for biotherapeutics")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for biotherapeutics")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o BiotherapeuticSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	// NOTE: map iteration order is random, so the SET clause column order
	// varies between calls; args are built in lockstep so pairing is correct.
	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), biotherapeuticPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"biotherapeutics\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, biotherapeuticPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in biotherapeutic slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all biotherapeutic")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *Biotherapeutic) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no biotherapeutics provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(biotherapeuticColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	// (every input that can change the generated statement is folded in,
	// '.'-separated, so distinct upsert shapes get distinct cache entries).
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	biotherapeuticUpsertCacheMut.RLock()
	cache, cached := biotherapeuticUpsertCache[key]
	biotherapeuticUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			biotherapeuticAllColumns,
			biotherapeuticColumnsWithDefault,
			biotherapeuticColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			biotherapeuticAllColumns,
			biotherapeuticPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert biotherapeutics, could not build update column list")
		}

		// Default the conflict target to the primary key when none is given.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(biotherapeuticPrimaryKeyColumns))
			copy(conflict, biotherapeuticPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"biotherapeutics\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(biotherapeuticType, biotherapeuticMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(biotherapeuticType, biotherapeuticMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert biotherapeutics")
	}

	if !cached {
		biotherapeuticUpsertCacheMut.Lock()
		biotherapeuticUpsertCache[key] = cache
		biotherapeuticUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single Biotherapeutic record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *Biotherapeutic) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no Biotherapeutic provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), biotherapeuticPrimaryKeyMapping)
	sql := "DELETE FROM \"biotherapeutics\" WHERE \"molregno\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from biotherapeutics")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for biotherapeutics")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q biotherapeuticQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no biotherapeuticQuery provided for delete all")
	}

	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from biotherapeutics")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for biotherapeutics")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o BiotherapeuticSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Run before-delete hooks only when at least one is registered.
	if len(biotherapeuticBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), biotherapeuticPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// One statement with a repeated primary-key WHERE clause (pk IN-style).
	sql := "DELETE FROM \"biotherapeutics\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, biotherapeuticPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from biotherapeutic slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for biotherapeutics")
	}

	if len(biotherapeuticAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *Biotherapeutic) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindBiotherapeutic(ctx, exec, o.Molregno)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place with the freshly loaded row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *BiotherapeuticSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := BiotherapeuticSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), biotherapeuticPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"biotherapeutics\".* FROM \"biotherapeutics\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, biotherapeuticPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in BiotherapeuticSlice")
	}

	*o = slice

	return nil
}

// BiotherapeuticExists checks if the Biotherapeutic row exists.
func BiotherapeuticExists(ctx context.Context, exec boil.ContextExecutor, molregno int64) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"biotherapeutics\" where \"molregno\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, molregno)
	}
	row := exec.QueryRowContext(ctx, sql, molregno)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if biotherapeutics exists")
	}

	return exists, nil
}
diff --git a/models/boil_queries.go b/models/boil_queries.go new file mode 100644 index 0000000..c438e9d --- /dev/null +++ b/models/boil_queries.go @@ -0,0 +1,33 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

import (
	"github.com/volatiletech/sqlboiler/v4/drivers"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
)

// dialect describes the target database to the query builder.
// LQ/RQ are 0x22 ('"'): identifiers are double-quoted.
var dialect = drivers.Dialect{
	LQ: 0x22,
	RQ: 0x22,

	UseIndexPlaceholders:    false,
	UseLastInsertID:         false,
	UseSchema:               false,
	UseDefaultKeyword:       true,
	UseAutoColumns:          false,
	UseTopClause:            false,
	UseOutputClause:         false,
	UseCaseWhenExistsClause: false,
}

// NewQuery initializes a new Query using the passed in QueryMods
func NewQuery(mods ...qm.QueryMod) *queries.Query {
	q := &queries.Query{}
	queries.SetDialect(q, &dialect)
	qm.Apply(q, mods...)

	return q
}
diff --git a/models/boil_table_names.go b/models/boil_table_names.go new file mode 100644 index 0000000..daa1f6b --- /dev/null +++ b/models/boil_table_names.go @@ -0,0 +1,190 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

// TableNames holds the literal database table name for every generated model,
// keyed by the model's Go-style identifier. Use these constants instead of
// hand-written table-name strings when building raw queries.
var TableNames = struct {
	ActionType                  string
	Activities                  string
	ActivityProperties          string
	ActivitySmid                string
	ActivitySTDSLookup          string
	ActivitySupp                string
	ActivitySuppMap             string
	AssayClassMap               string
	AssayClassification         string
	AssayParameters             string
	AssayType                   string
	Assays                      string
	AtcClassification           string
	BindingSites                string
	BioComponentSequences       string
	BioassayOntology            string
	BiotherapeuticComponents    string
	Biotherapeutics             string
	CellDictionary              string
	Chebi                       string
	ChemblIDLookup              string
	ComponentClass              string
	ComponentDomains            string
	ComponentGo                 string
	ComponentSequences          string
	ComponentSynonyms           string
	Compound                    string
	CompoundProperties          string
	CompoundRecords             string
	CompoundStructuralAlerts    string
	CompoundStructures          string
	ConfidenceScoreLookup       string
	CurationLookup              string
	DataValidityLookup          string
	DefinedDailyDose            string
	Docs                        string
	Domains                     string
	DrugIndication              string
	DrugMechanism               string
	DrugWarning                 string
	Formulations                string
	FracClassification         string
	Genbank                     string
	GenbankFeatures             string
	GoClassification            string
	HracClassification          string
	IndicationRefs              string
	IracClassification          string
	LigandEff                   string
	MechanismRefs               string
	Metabolism                  string
	MetabolismRefs              string
	MoleculeAtcClassification   string
	MoleculeDictionary          string
	MoleculeFracClassification  string
	MoleculeHierarchy           string
	MoleculeHracClassification  string
	MoleculeIracClassification  string
	MoleculeSynonyms            string
	OrganismClass               string
	PatentUseCodes              string
	PredictedBindingDomains     string
	ProductPatents              string
	Products                    string
	ProteinClassSynonyms        string
	ProteinClassification       string
	ProteinFamilyClassification string
	Reaction                    string
	ReactionParticipant         string
	Reactionside                string
	ReactionsideReaction        string
	ReactivePart                string
	RelationshipType            string
	ResearchCompanies           string
	ResearchStem                string
	Seqhash                     string
	SiteComponents              string
	Source                      string
	StructuralAlertSets         string
	StructuralAlerts            string
	TargetComponents            string
	TargetDictionary            string
	TargetRelations             string
	TargetType                  string
	TissueDictionary            string
	Uniprot                     string
	UniprotToReaction           string
	UsanStems                   string
	VariantSequences            string
	Version                     string
	WarningRefs                 string
}{
	ActionType:                  "action_type",
	Activities:                  "activities",
	ActivityProperties:          "activity_properties",
	ActivitySmid:                "activity_smid",
	ActivitySTDSLookup:          "activity_stds_lookup",
	ActivitySupp:                "activity_supp",
	ActivitySuppMap:             "activity_supp_map",
	AssayClassMap:               "assay_class_map",
	AssayClassification:         "assay_classification",
	AssayParameters:             "assay_parameters",
	AssayType:                   "assay_type",
	Assays:                      "assays",
	AtcClassification:           "atc_classification",
	BindingSites:                "binding_sites",
	BioComponentSequences:       "bio_component_sequences",
	BioassayOntology:            "bioassay_ontology",
	BiotherapeuticComponents:    "biotherapeutic_components",
	Biotherapeutics:             "biotherapeutics",
	CellDictionary:              "cell_dictionary",
	Chebi:                       "chebi",
	ChemblIDLookup:              "chembl_id_lookup",
	ComponentClass:              "component_class",
	ComponentDomains:            "component_domains",
	ComponentGo:                 "component_go",
	ComponentSequences:          "component_sequences",
	ComponentSynonyms:           "component_synonyms",
	Compound:                    "compound",
	CompoundProperties:          "compound_properties",
	CompoundRecords:             "compound_records",
	CompoundStructuralAlerts:    "compound_structural_alerts",
	CompoundStructures:          "compound_structures",
	ConfidenceScoreLookup:       "confidence_score_lookup",
	CurationLookup:              "curation_lookup",
	DataValidityLookup:          "data_validity_lookup",
	DefinedDailyDose:            "defined_daily_dose",
	Docs:                        "docs",
	Domains:                     "domains",
	DrugIndication:              "drug_indication",
	DrugMechanism:               "drug_mechanism",
	DrugWarning:                 "drug_warning",
	Formulations:                "formulations",
	FracClassification:         "frac_classification",
	Genbank:                     "genbank",
	GenbankFeatures:             "genbank_features",
	GoClassification:            "go_classification",
	HracClassification:          "hrac_classification",
	IndicationRefs:              "indication_refs",
	IracClassification:          "irac_classification",
	LigandEff:                   "ligand_eff",
	MechanismRefs:               "mechanism_refs",
	Metabolism:                  "metabolism",
	MetabolismRefs:              "metabolism_refs",
	MoleculeAtcClassification:   "molecule_atc_classification",
	MoleculeDictionary:          "molecule_dictionary",
	MoleculeFracClassification:  "molecule_frac_classification",
	MoleculeHierarchy:           "molecule_hierarchy",
	MoleculeHracClassification:  "molecule_hrac_classification",
	MoleculeIracClassification:  "molecule_irac_classification",
	MoleculeSynonyms:            "molecule_synonyms",
	OrganismClass:               "organism_class",
	PatentUseCodes:              "patent_use_codes",
	PredictedBindingDomains:     "predicted_binding_domains",
	ProductPatents:              "product_patents",
	Products:                    "products",
	ProteinClassSynonyms:        "protein_class_synonyms",
	ProteinClassification:       "protein_classification",
	ProteinFamilyClassification: "protein_family_classification",
	Reaction:                    "reaction",
	ReactionParticipant:         "reaction_participant",
	Reactionside:                "reactionside",
	ReactionsideReaction:        "reactionside_reaction",
	ReactivePart:                "reactive_part",
	RelationshipType:            "relationship_type",
	ResearchCompanies:           "research_companies",
	ResearchStem:                "research_stem",
	Seqhash:                     "seqhash",
	SiteComponents:              "site_components",
	Source:                      "source",
	StructuralAlertSets:         "structural_alert_sets",
	StructuralAlerts:            "structural_alerts",
	TargetComponents:            "target_components",
	TargetDictionary:            "target_dictionary",
	TargetRelations:             "target_relations",
	TargetType:                  "target_type",
	TissueDictionary:            "tissue_dictionary",
	Uniprot:                     "uniprot",
	UniprotToReaction:           "uniprot_to_reaction",
	UsanStems:                   "usan_stems",
	VariantSequences:            "variant_sequences",
	Version:                     "version",
	WarningRefs:                 "warning_refs",
}
diff --git a/models/boil_types.go b/models/boil_types.go new file mode 100644 index 0000000..3d1f76d --- /dev/null +++ b/models/boil_types.go @@ -0,0 +1,52 @@
//
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

import (
	"strconv"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/strmangle"
)

// M type is for providing columns and column values to UpdateAll.
type M map[string]interface{}

// ErrSyncFail occurs during insert when the record could not be retrieved in
// order to populate default value information. This usually happens when LastInsertId
// fails or there was a primary key configuration that was not resolvable.
var ErrSyncFail = errors.New("models: failed to synchronize data after insert")

// insertCache stores a prepared insert statement plus the struct-field
// mappings needed to bind its values and scan returned defaults.
type insertCache struct {
	query        string
	retQuery     string
	valueMapping []uint64
	retMapping   []uint64
}

// updateCache stores a prepared update statement and its value mapping.
type updateCache struct {
	query        string
	valueMapping []uint64
}

// makeCacheKey derives a statement-cache key from the column specification
// and any non-zero default columns; distinct column sets yield distinct keys.
func makeCacheKey(cols boil.Columns, nzDefaults []string) string {
	buf := strmangle.GetBuffer()

	buf.WriteString(strconv.Itoa(cols.Kind))
	for _, w := range cols.Cols {
		buf.WriteString(w)
	}

	if len(nzDefaults) != 0 {
		buf.WriteByte('.')
	}
	for _, nz := range nzDefaults {
		buf.WriteString(nz)
	}

	str := buf.String()
	strmangle.PutBuffer(buf)
	return str
}
diff --git a/models/boil_view_names.go b/models/boil_view_names.go new file mode 100644 index 0000000..8ae6405 --- /dev/null +++ b/models/boil_view_names.go @@ -0,0 +1,7 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

// ViewNames is empty: the source schema defines no views.
var ViewNames = struct {
}{}
diff --git a/models/cell_dictionary.go b/models/cell_dictionary.go new file mode 100644 index 0000000..31351fa --- /dev/null +++ b/models/cell_dictionary.go @@ -0,0 +1,1408 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// CellDictionary is an object representing the database table.
// Nullable columns use null.* wrappers so NULL round-trips without sentinels.
type CellDictionary struct {
	CellID             int64       `boil:"cell_id" json:"cell_id" toml:"cell_id" yaml:"cell_id"`
	CellName           string      `boil:"cell_name" json:"cell_name" toml:"cell_name" yaml:"cell_name"`
	CellDescription    null.String `boil:"cell_description" json:"cell_description,omitempty" toml:"cell_description" yaml:"cell_description,omitempty"`
	CellSourceTissue   null.String `boil:"cell_source_tissue" json:"cell_source_tissue,omitempty" toml:"cell_source_tissue" yaml:"cell_source_tissue,omitempty"`
	CellSourceOrganism null.String `boil:"cell_source_organism" json:"cell_source_organism,omitempty" toml:"cell_source_organism" yaml:"cell_source_organism,omitempty"`
	CellSourceTaxID    null.Int64  `boil:"cell_source_tax_id" json:"cell_source_tax_id,omitempty" toml:"cell_source_tax_id" yaml:"cell_source_tax_id,omitempty"`
	CloID              null.String `boil:"clo_id" json:"clo_id,omitempty" toml:"clo_id" yaml:"clo_id,omitempty"`
	EfoID              null.String `boil:"efo_id" json:"efo_id,omitempty" toml:"efo_id" yaml:"efo_id,omitempty"`
	CellosaurusID      null.String `boil:"cellosaurus_id" json:"cellosaurus_id,omitempty" toml:"cellosaurus_id" yaml:"cellosaurus_id,omitempty"`
	CLLincsID          null.String `boil:"cl_lincs_id" json:"cl_lincs_id,omitempty" toml:"cl_lincs_id" yaml:"cl_lincs_id,omitempty"`
	ChemblID           null.String `boil:"chembl_id" json:"chembl_id,omitempty" toml:"chembl_id" yaml:"chembl_id,omitempty"`
	CellOntologyID     null.String `boil:"cell_ontology_id" json:"cell_ontology_id,omitempty" toml:"cell_ontology_id" yaml:"cell_ontology_id,omitempty"`

	// R holds eagerly-loaded relationships; L holds their Load methods.
	R *cellDictionaryR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L cellDictionaryL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// CellDictionaryColumns maps struct field names to bare column names.
var CellDictionaryColumns = struct {
	CellID             string
	CellName           string
	CellDescription    string
	CellSourceTissue   string
	CellSourceOrganism string
	CellSourceTaxID    string
	CloID              string
	EfoID              string
	CellosaurusID      string
	CLLincsID          string
	ChemblID           string
	CellOntologyID     string
}{
	CellID:             "cell_id",
	CellName:           "cell_name",
	CellDescription:    "cell_description",
	CellSourceTissue:   "cell_source_tissue",
	CellSourceOrganism: "cell_source_organism",
	CellSourceTaxID:    "cell_source_tax_id",
	CloID:              "clo_id",
	EfoID:              "efo_id",
	CellosaurusID:      "cellosaurus_id",
	CLLincsID:          "cl_lincs_id",
	ChemblID:           "chembl_id",
	CellOntologyID:     "cell_ontology_id",
}

// CellDictionaryTableColumns maps struct field names to table-qualified column names.
var CellDictionaryTableColumns = struct {
	CellID             string
	CellName           string
	CellDescription    string
	CellSourceTissue   string
	CellSourceOrganism string
	CellSourceTaxID    string
	CloID              string
	EfoID              string
	CellosaurusID      string
	CLLincsID          string
	ChemblID           string
	CellOntologyID     string
}{
	CellID:             "cell_dictionary.cell_id",
	CellName:           "cell_dictionary.cell_name",
	CellDescription:    "cell_dictionary.cell_description",
	CellSourceTissue:   "cell_dictionary.cell_source_tissue",
	CellSourceOrganism: "cell_dictionary.cell_source_organism",
	CellSourceTaxID:    "cell_dictionary.cell_source_tax_id",
	CloID:              "cell_dictionary.clo_id",
	EfoID:              "cell_dictionary.efo_id",
	CellosaurusID:      "cell_dictionary.cellosaurus_id",
	CLLincsID:          "cell_dictionary.cl_lincs_id",
	ChemblID:           "cell_dictionary.chembl_id",
	CellOntologyID:     "cell_dictionary.cell_ontology_id",
}

// Generated where

// CellDictionaryWhere provides typed where-clause helpers per column.
var CellDictionaryWhere = struct {
	CellID             whereHelperint64
	CellName           whereHelperstring
	CellDescription    whereHelpernull_String
	CellSourceTissue   whereHelpernull_String
	CellSourceOrganism whereHelpernull_String
	CellSourceTaxID    whereHelpernull_Int64
	CloID              whereHelpernull_String
	EfoID              whereHelpernull_String
	CellosaurusID      whereHelpernull_String
	CLLincsID          whereHelpernull_String
	ChemblID           whereHelpernull_String
	CellOntologyID     whereHelpernull_String
}{
	CellID:             whereHelperint64{field: "\"cell_dictionary\".\"cell_id\""},
	CellName:           whereHelperstring{field: "\"cell_dictionary\".\"cell_name\""},
	CellDescription:    whereHelpernull_String{field: "\"cell_dictionary\".\"cell_description\""},
	CellSourceTissue:   whereHelpernull_String{field: "\"cell_dictionary\".\"cell_source_tissue\""},
	CellSourceOrganism: whereHelpernull_String{field: "\"cell_dictionary\".\"cell_source_organism\""},
	CellSourceTaxID:    whereHelpernull_Int64{field: "\"cell_dictionary\".\"cell_source_tax_id\""},
	CloID:              whereHelpernull_String{field: "\"cell_dictionary\".\"clo_id\""},
	EfoID:              whereHelpernull_String{field: "\"cell_dictionary\".\"efo_id\""},
	CellosaurusID:      whereHelpernull_String{field: "\"cell_dictionary\".\"cellosaurus_id\""},
	CLLincsID:          whereHelpernull_String{field: "\"cell_dictionary\".\"cl_lincs_id\""},
	ChemblID:           whereHelpernull_String{field: "\"cell_dictionary\".\"chembl_id\""},
	CellOntologyID:     whereHelpernull_String{field: "\"cell_dictionary\".\"cell_ontology_id\""},
}

// CellDictionaryRels is where relationship names are stored.
var CellDictionaryRels = struct {
	Chembl     string
	CellAssays string
}{
	Chembl:     "Chembl",
	CellAssays: "CellAssays",
}

// cellDictionaryR is where relationships are stored.
type cellDictionaryR struct {
	Chembl     *ChemblIDLookup `boil:"Chembl" json:"Chembl" toml:"Chembl" yaml:"Chembl"`
	CellAssays AssaySlice      `boil:"CellAssays" json:"CellAssays" toml:"CellAssays" yaml:"CellAssays"`
}

// NewStruct creates a new relationship struct
func (*cellDictionaryR) NewStruct() *cellDictionaryR {
	return &cellDictionaryR{}
}

// GetChembl returns the loaded Chembl relationship, or nil when none is loaded.
func (r *cellDictionaryR) GetChembl() *ChemblIDLookup {
	if r == nil {
		return nil
	}
	return r.Chembl
}

// GetCellAssays returns the loaded CellAssays relationship, or nil when none is loaded.
func (r *cellDictionaryR) GetCellAssays() AssaySlice {
	if r == nil {
		return nil
	}
	return r.CellAssays
}

// cellDictionaryL is where Load methods for each relationship are stored.
type cellDictionaryL struct{}

var (
	cellDictionaryAllColumns            = []string{"cell_id", "cell_name", "cell_description", "cell_source_tissue", "cell_source_organism", "cell_source_tax_id", "clo_id", "efo_id", "cellosaurus_id", "cl_lincs_id", "chembl_id", "cell_ontology_id"}
	cellDictionaryColumnsWithoutDefault = []string{"cell_id", "cell_name"}
	cellDictionaryColumnsWithDefault    = []string{"cell_description", "cell_source_tissue", "cell_source_organism", "cell_source_tax_id", "clo_id", "efo_id", "cellosaurus_id", "cl_lincs_id", "chembl_id", "cell_ontology_id"}
	cellDictionaryPrimaryKeyColumns     = []string{"cell_id"}
	cellDictionaryGeneratedColumns      = []string{}
)

type (
	// CellDictionarySlice is an alias for a slice of pointers to CellDictionary.
	// This should almost always be used instead of []CellDictionary.
	CellDictionarySlice []*CellDictionary
	// CellDictionaryHook is the signature for custom CellDictionary hook methods
	CellDictionaryHook func(context.Context, boil.ContextExecutor, *CellDictionary) error

	cellDictionaryQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	cellDictionaryType                 = reflect.TypeOf(&CellDictionary{})
	cellDictionaryMapping              = queries.MakeStructMapping(cellDictionaryType)
	cellDictionaryPrimaryKeyMapping, _ = queries.BindMapping(cellDictionaryType, cellDictionaryMapping, cellDictionaryPrimaryKeyColumns)
	cellDictionaryInsertCacheMut       sync.RWMutex
	cellDictionaryInsertCache          = make(map[string]insertCache)
	cellDictionaryUpdateCacheMut       sync.RWMutex
	cellDictionaryUpdateCache          = make(map[string]updateCache)
	cellDictionaryUpsertCacheMut       sync.RWMutex
	cellDictionaryUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Per-hook-point registries; appended to by AddCellDictionaryHook.
var cellDictionaryAfterSelectHooks []CellDictionaryHook

var cellDictionaryBeforeInsertHooks []CellDictionaryHook
var cellDictionaryAfterInsertHooks []CellDictionaryHook

var cellDictionaryBeforeUpdateHooks []CellDictionaryHook
var cellDictionaryAfterUpdateHooks []CellDictionaryHook

var cellDictionaryBeforeDeleteHooks []CellDictionaryHook
var cellDictionaryAfterDeleteHooks []CellDictionaryHook

var cellDictionaryBeforeUpsertHooks []CellDictionaryHook
var cellDictionaryAfterUpsertHooks []CellDictionaryHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *CellDictionary) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be globally disabled per-context via boil.SkipHooks.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range cellDictionaryAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *CellDictionary) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range cellDictionaryBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *CellDictionary) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range cellDictionaryAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *CellDictionary) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range cellDictionaryBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *CellDictionary) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range cellDictionaryAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *CellDictionary) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range cellDictionaryBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *CellDictionary) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range cellDictionaryAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *CellDictionary) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range cellDictionaryBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *CellDictionary) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range cellDictionaryAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddCellDictionaryHook registers your hook function for all future operations.
+func AddCellDictionaryHook(hookPoint boil.HookPoint, cellDictionaryHook CellDictionaryHook) { + switch hookPoint { + case boil.AfterSelectHook: + cellDictionaryAfterSelectHooks = append(cellDictionaryAfterSelectHooks, cellDictionaryHook) + case boil.BeforeInsertHook: + cellDictionaryBeforeInsertHooks = append(cellDictionaryBeforeInsertHooks, cellDictionaryHook) + case boil.AfterInsertHook: + cellDictionaryAfterInsertHooks = append(cellDictionaryAfterInsertHooks, cellDictionaryHook) + case boil.BeforeUpdateHook: + cellDictionaryBeforeUpdateHooks = append(cellDictionaryBeforeUpdateHooks, cellDictionaryHook) + case boil.AfterUpdateHook: + cellDictionaryAfterUpdateHooks = append(cellDictionaryAfterUpdateHooks, cellDictionaryHook) + case boil.BeforeDeleteHook: + cellDictionaryBeforeDeleteHooks = append(cellDictionaryBeforeDeleteHooks, cellDictionaryHook) + case boil.AfterDeleteHook: + cellDictionaryAfterDeleteHooks = append(cellDictionaryAfterDeleteHooks, cellDictionaryHook) + case boil.BeforeUpsertHook: + cellDictionaryBeforeUpsertHooks = append(cellDictionaryBeforeUpsertHooks, cellDictionaryHook) + case boil.AfterUpsertHook: + cellDictionaryAfterUpsertHooks = append(cellDictionaryAfterUpsertHooks, cellDictionaryHook) + } +} + +// One returns a single cellDictionary record from the query. +func (q cellDictionaryQuery) One(ctx context.Context, exec boil.ContextExecutor) (*CellDictionary, error) { + o := &CellDictionary{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for cell_dictionary") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all CellDictionary records from the query. 
+func (q cellDictionaryQuery) All(ctx context.Context, exec boil.ContextExecutor) (CellDictionarySlice, error) { + var o []*CellDictionary + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to CellDictionary slice") + } + + if len(cellDictionaryAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all CellDictionary records in the query. +func (q cellDictionaryQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count cell_dictionary rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q cellDictionaryQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if cell_dictionary exists") + } + + return count > 0, nil +} + +// Chembl pointed to by the foreign key. +func (o *CellDictionary) Chembl(mods ...qm.QueryMod) chemblIDLookupQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"chembl_id\" = ?", o.ChemblID), + } + + queryMods = append(queryMods, mods...) + + return ChemblIDLookups(queryMods...) +} + +// CellAssays retrieves all the assay's Assays with an executor via cell_id column. +func (o *CellDictionary) CellAssays(mods ...qm.QueryMod) assayQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) 
+ } + + queryMods = append(queryMods, + qm.Where("\"assays\".\"cell_id\"=?", o.CellID), + ) + + return Assays(queryMods...) +} + +// LoadChembl allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (cellDictionaryL) LoadChembl(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCellDictionary interface{}, mods queries.Applicator) error { + var slice []*CellDictionary + var object *CellDictionary + + if singular { + object = maybeCellDictionary.(*CellDictionary) + } else { + slice = *maybeCellDictionary.(*[]*CellDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &cellDictionaryR{} + } + if !queries.IsNil(object.ChemblID) { + args = append(args, object.ChemblID) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &cellDictionaryR{} + } + + for _, a := range args { + if queries.Equal(a, obj.ChemblID) { + continue Outer + } + } + + if !queries.IsNil(obj.ChemblID) { + args = append(args, obj.ChemblID) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`chembl_id_lookup`), + qm.WhereIn(`chembl_id_lookup.chembl_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ChemblIDLookup") + } + + var resultSlice []*ChemblIDLookup + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice ChemblIDLookup") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for chembl_id_lookup") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for chembl_id_lookup") + } + + if len(cellDictionaryAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := 
obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Chembl = foreign + if foreign.R == nil { + foreign.R = &chemblIDLookupR{} + } + foreign.R.ChemblCellDictionary = object + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.ChemblID, foreign.ChemblID) { + local.R.Chembl = foreign + if foreign.R == nil { + foreign.R = &chemblIDLookupR{} + } + foreign.R.ChemblCellDictionary = local + break + } + } + } + + return nil +} + +// LoadCellAssays allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (cellDictionaryL) LoadCellAssays(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCellDictionary interface{}, mods queries.Applicator) error { + var slice []*CellDictionary + var object *CellDictionary + + if singular { + object = maybeCellDictionary.(*CellDictionary) + } else { + slice = *maybeCellDictionary.(*[]*CellDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &cellDictionaryR{} + } + args = append(args, object.CellID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &cellDictionaryR{} + } + + for _, a := range args { + if queries.Equal(a, obj.CellID) { + continue Outer + } + } + + args = append(args, obj.CellID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assays`), + qm.WhereIn(`assays.cell_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load assays") + } + + var resultSlice []*Assay + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice assays") + } + + if err = results.Close(); err 
!= nil { + return errors.Wrap(err, "failed to close results in eager load on assays") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays") + } + + if len(assayAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.CellAssays = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.Cell = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.CellID, foreign.CellID) { + local.R.CellAssays = append(local.R.CellAssays, foreign) + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.Cell = local + break + } + } + } + + return nil +} + +// SetChembl of the cellDictionary to the related item. +// Sets o.R.Chembl to related. +// Adds o to related.R.ChemblCellDictionary. 
+func (o *CellDictionary) SetChembl(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ChemblIDLookup) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"cell_dictionary\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"chembl_id"}), + strmangle.WhereClause("\"", "\"", 0, cellDictionaryPrimaryKeyColumns), + ) + values := []interface{}{related.ChemblID, o.CellID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.ChemblID, related.ChemblID) + if o.R == nil { + o.R = &cellDictionaryR{ + Chembl: related, + } + } else { + o.R.Chembl = related + } + + if related.R == nil { + related.R = &chemblIDLookupR{ + ChemblCellDictionary: o, + } + } else { + related.R.ChemblCellDictionary = o + } + + return nil +} + +// RemoveChembl relationship. +// Sets o.R.Chembl to nil. +// Removes o from all passed in related items' relationships struct. +func (o *CellDictionary) RemoveChembl(ctx context.Context, exec boil.ContextExecutor, related *ChemblIDLookup) error { + var err error + + queries.SetScanner(&o.ChemblID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("chembl_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.Chembl = nil + } + if related == nil || related.R == nil { + return nil + } + + related.R.ChemblCellDictionary = nil + return nil +} + +// AddCellAssays adds the given related objects to the existing relationships +// of the cell_dictionary, optionally inserting them as new records. +// Appends related to o.R.CellAssays. +// Sets related.R.Cell appropriately. 
+func (o *CellDictionary) AddCellAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.CellID, o.CellID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"cell_id"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{o.CellID, rel.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.CellID, o.CellID) + } + } + + if o.R == nil { + o.R = &cellDictionaryR{ + CellAssays: related, + } + } else { + o.R.CellAssays = append(o.R.CellAssays, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &assayR{ + Cell: o, + } + } else { + rel.R.Cell = o + } + } + return nil +} + +// SetCellAssays removes all previously related items of the +// cell_dictionary replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.Cell's CellAssays accordingly. +// Replaces o.R.CellAssays with related. +// Sets related.R.Cell's CellAssays accordingly. +func (o *CellDictionary) SetCellAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error { + query := "update \"assays\" set \"cell_id\" = null where \"cell_id\" = ?" + values := []interface{}{o.CellID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) 
+ if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.CellAssays { + queries.SetScanner(&rel.CellID, nil) + if rel.R == nil { + continue + } + + rel.R.Cell = nil + } + o.R.CellAssays = nil + } + + return o.AddCellAssays(ctx, exec, insert, related...) +} + +// RemoveCellAssays relationships from objects passed in. +// Removes related items from R.CellAssays (uses pointer comparison, removal does not keep order) +// Sets related.R.Cell. +func (o *CellDictionary) RemoveCellAssays(ctx context.Context, exec boil.ContextExecutor, related ...*Assay) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.CellID, nil) + if rel.R != nil { + rel.R.Cell = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("cell_id")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.CellAssays { + if rel != ri { + continue + } + + ln := len(o.R.CellAssays) + if ln > 1 && i < ln-1 { + o.R.CellAssays[i] = o.R.CellAssays[ln-1] + } + o.R.CellAssays = o.R.CellAssays[:ln-1] + break + } + } + + return nil +} + +// CellDictionaries retrieves all the records using an executor. +func CellDictionaries(mods ...qm.QueryMod) cellDictionaryQuery { + mods = append(mods, qm.From("\"cell_dictionary\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"cell_dictionary\".*"}) + } + + return cellDictionaryQuery{q} +} + +// FindCellDictionary retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindCellDictionary(ctx context.Context, exec boil.ContextExecutor, cellID int64, selectCols ...string) (*CellDictionary, error) { + cellDictionaryObj := &CellDictionary{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"cell_dictionary\" where \"cell_id\"=?", sel, + ) + + q := queries.Raw(query, cellID) + + err := q.Bind(ctx, exec, cellDictionaryObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from cell_dictionary") + } + + if err = cellDictionaryObj.doAfterSelectHooks(ctx, exec); err != nil { + return cellDictionaryObj, err + } + + return cellDictionaryObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *CellDictionary) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no cell_dictionary provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(cellDictionaryColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + cellDictionaryInsertCacheMut.RLock() + cache, cached := cellDictionaryInsertCache[key] + cellDictionaryInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + cellDictionaryAllColumns, + cellDictionaryColumnsWithDefault, + cellDictionaryColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(cellDictionaryType, cellDictionaryMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(cellDictionaryType, cellDictionaryMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + 
cache.query = fmt.Sprintf("INSERT INTO \"cell_dictionary\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"cell_dictionary\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into cell_dictionary") + } + + if !cached { + cellDictionaryInsertCacheMut.Lock() + cellDictionaryInsertCache[key] = cache + cellDictionaryInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the CellDictionary. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *CellDictionary) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + cellDictionaryUpdateCacheMut.RLock() + cache, cached := cellDictionaryUpdateCache[key] + cellDictionaryUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + cellDictionaryAllColumns, + cellDictionaryPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update cell_dictionary, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"cell_dictionary\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, cellDictionaryPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(cellDictionaryType, cellDictionaryMapping, append(wl, cellDictionaryPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update cell_dictionary row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for cell_dictionary") + } + + if !cached { + cellDictionaryUpdateCacheMut.Lock() + cellDictionaryUpdateCache[key] = cache + cellDictionaryUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q cellDictionaryQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for cell_dictionary") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for cell_dictionary") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o CellDictionarySlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cellDictionaryPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"cell_dictionary\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cellDictionaryPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in cellDictionary slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all cellDictionary") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *CellDictionary) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no cell_dictionary provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(cellDictionaryColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + cellDictionaryUpsertCacheMut.RLock() + cache, cached := cellDictionaryUpsertCache[key] + cellDictionaryUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + cellDictionaryAllColumns, + cellDictionaryColumnsWithDefault, + cellDictionaryColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + cellDictionaryAllColumns, + 
cellDictionaryPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert cell_dictionary, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(cellDictionaryPrimaryKeyColumns)) + copy(conflict, cellDictionaryPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"cell_dictionary\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(cellDictionaryType, cellDictionaryMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(cellDictionaryType, cellDictionaryMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert cell_dictionary") + } + + if !cached { + cellDictionaryUpsertCacheMut.Lock() + cellDictionaryUpsertCache[key] = cache + cellDictionaryUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single CellDictionary record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *CellDictionary) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no CellDictionary provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cellDictionaryPrimaryKeyMapping) + sql := "DELETE FROM \"cell_dictionary\" WHERE \"cell_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from cell_dictionary") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for cell_dictionary") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q cellDictionaryQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no cellDictionaryQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from cell_dictionary") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for cell_dictionary") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o CellDictionarySlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(cellDictionaryBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cellDictionaryPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"cell_dictionary\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cellDictionaryPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from cellDictionary slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for cell_dictionary") + } + + if len(cellDictionaryAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *CellDictionary) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindCellDictionary(ctx, exec, o.CellID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *CellDictionarySlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := CellDictionarySlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), cellDictionaryPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"cell_dictionary\".* FROM \"cell_dictionary\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, cellDictionaryPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in CellDictionarySlice") + } + + *o = slice + + return nil +} + +// CellDictionaryExists checks if the CellDictionary row exists. +func CellDictionaryExists(ctx context.Context, exec boil.ContextExecutor, cellID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"cell_dictionary\" where \"cell_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, cellID) + } + row := exec.QueryRowContext(ctx, sql, cellID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if cell_dictionary exists") + } + + return exists, nil +} diff --git a/models/chebi.go b/models/chebi.go new file mode 100644 index 0000000..24ee1be --- /dev/null +++ b/models/chebi.go @@ -0,0 +1,1598 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// Chebi is an object representing the database table.
type Chebi struct {
	// Accession is the primary key (see chebiPrimaryKeyColumns); nullable per schema.
	Accession null.String `boil:"accession" json:"accession,omitempty" toml:"accession" yaml:"accession,omitempty"`
	// SubclassOf is a self-referencing foreign key to chebi.accession.
	SubclassOf null.String `boil:"subclass_of" json:"subclass_of,omitempty" toml:"subclass_of" yaml:"subclass_of,omitempty"`

	R *chebiR `boil:"-" json:"-" toml:"-" yaml:"-"` // eager-loaded relationships
	L chebiL  `boil:"-" json:"-" toml:"-" yaml:"-"` // relationship Load methods
}

var ChebiColumns = struct {
	Accession  string
	SubclassOf string
}{
	Accession:  "accession",
	SubclassOf: "subclass_of",
}

var ChebiTableColumns = struct {
	Accession  string
	SubclassOf string
}{
	Accession:  "chebi.accession",
	SubclassOf: "chebi.subclass_of",
}

// Generated where

var ChebiWhere = struct {
	Accession  whereHelpernull_String
	SubclassOf whereHelpernull_String
}{
	Accession:  whereHelpernull_String{field: "\"chebi\".\"accession\""},
	SubclassOf: whereHelpernull_String{field: "\"chebi\".\"subclass_of\""},
}

// ChebiRels is where relationship names are stored.
var ChebiRels = struct {
	SubclassOfChebi  string
	SubclassOfChebis string
	Compounds        string
}{
	SubclassOfChebi:  "SubclassOfChebi",
	SubclassOfChebis: "SubclassOfChebis",
	Compounds:        "Compounds",
}

// chebiR is where relationships are stored.
type chebiR struct {
	SubclassOfChebi  *Chebi        `boil:"SubclassOfChebi" json:"SubclassOfChebi" toml:"SubclassOfChebi" yaml:"SubclassOfChebi"`
	SubclassOfChebis ChebiSlice    `boil:"SubclassOfChebis" json:"SubclassOfChebis" toml:"SubclassOfChebis" yaml:"SubclassOfChebis"`
	Compounds        CompoundSlice `boil:"Compounds" json:"Compounds" toml:"Compounds" yaml:"Compounds"`
}

// NewStruct creates a new relationship struct
func (*chebiR) NewStruct() *chebiR {
	return &chebiR{}
}

// Nil-safe accessors: each getter tolerates a nil receiver so callers can
// chain through an unloaded R struct without a nil check.
func (r *chebiR) GetSubclassOfChebi() *Chebi {
	if r == nil {
		return nil
	}
	return r.SubclassOfChebi
}

func (r *chebiR) GetSubclassOfChebis() ChebiSlice {
	if r == nil {
		return nil
	}
	return r.SubclassOfChebis
}

func (r *chebiR) GetCompounds() CompoundSlice {
	if r == nil {
		return nil
	}
	return r.Compounds
}

// chebiL is where Load methods for each relationship are stored.
type chebiL struct{}

var (
	chebiAllColumns            = []string{"accession", "subclass_of"}
	chebiColumnsWithoutDefault = []string{}
	chebiColumnsWithDefault    = []string{"accession", "subclass_of"}
	chebiPrimaryKeyColumns     = []string{"accession"}
	chebiGeneratedColumns      = []string{}
)

type (
	// ChebiSlice is an alias for a slice of pointers to Chebi.
	// This should almost always be used instead of []Chebi.
	ChebiSlice []*Chebi
	// ChebiHook is the signature for custom Chebi hook methods
	ChebiHook func(context.Context, boil.ContextExecutor, *Chebi) error

	chebiQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	chebiType                 = reflect.TypeOf(&Chebi{})
	chebiMapping              = queries.MakeStructMapping(chebiType)
	chebiPrimaryKeyMapping, _ = queries.BindMapping(chebiType, chebiMapping, chebiPrimaryKeyColumns)
	chebiInsertCacheMut       sync.RWMutex
	chebiInsertCache          = make(map[string]insertCache)
	chebiUpdateCacheMut       sync.RWMutex
	chebiUpdateCache          = make(map[string]updateCache)
	chebiUpsertCacheMut       sync.RWMutex
	chebiUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Package-level hook registries, appended to by AddChebiHook.
var chebiAfterSelectHooks []ChebiHook

var chebiBeforeInsertHooks []ChebiHook
var chebiAfterInsertHooks []ChebiHook

var chebiBeforeUpdateHooks []ChebiHook
var chebiAfterUpdateHooks []ChebiHook

var chebiBeforeDeleteHooks []ChebiHook
var chebiAfterDeleteHooks []ChebiHook

var chebiBeforeUpsertHooks []ChebiHook
var chebiAfterUpsertHooks []ChebiHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *Chebi) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range chebiAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
// Hook runners below all follow the same pattern: skip when hooks are
// disabled on the context, otherwise run registered hooks in order and
// stop at the first error.
func (o *Chebi) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range chebiBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *Chebi) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range chebiAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *Chebi) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range chebiBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *Chebi) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range chebiAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *Chebi) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range chebiBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *Chebi) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range chebiAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *Chebi) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range chebiBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *Chebi) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range chebiAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddChebiHook registers your hook function for all future operations.
// NOTE(review): registration appends to package-level slices without locking;
// register hooks during startup, before concurrent use.
func AddChebiHook(hookPoint boil.HookPoint, chebiHook ChebiHook) {
	switch hookPoint {
	case boil.AfterSelectHook:
		chebiAfterSelectHooks = append(chebiAfterSelectHooks, chebiHook)
	case boil.BeforeInsertHook:
		chebiBeforeInsertHooks = append(chebiBeforeInsertHooks, chebiHook)
	case boil.AfterInsertHook:
		chebiAfterInsertHooks = append(chebiAfterInsertHooks, chebiHook)
	case boil.BeforeUpdateHook:
		chebiBeforeUpdateHooks = append(chebiBeforeUpdateHooks, chebiHook)
	case boil.AfterUpdateHook:
		chebiAfterUpdateHooks = append(chebiAfterUpdateHooks, chebiHook)
	case boil.BeforeDeleteHook:
		chebiBeforeDeleteHooks = append(chebiBeforeDeleteHooks, chebiHook)
	case boil.AfterDeleteHook:
		chebiAfterDeleteHooks = append(chebiAfterDeleteHooks, chebiHook)
	case boil.BeforeUpsertHook:
		chebiBeforeUpsertHooks = append(chebiBeforeUpsertHooks, chebiHook)
	case boil.AfterUpsertHook:
		chebiAfterUpsertHooks = append(chebiAfterUpsertHooks, chebiHook)
	}
}

// One returns a single chebi record from the query.
func (q chebiQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Chebi, error) {
	o := &Chebi{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		// sql.ErrNoRows is passed through unwrapped so callers can test for it.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for chebi")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all Chebi records from the query.
func (q chebiQuery) All(ctx context.Context, exec boil.ContextExecutor) (ChebiSlice, error) {
	var o []*Chebi

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to Chebi slice")
	}

	if len(chebiAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all Chebi records in the query.
func (q chebiQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count chebi rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q chebiQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if chebi exists")
	}

	return count > 0, nil
}

// SubclassOfChebi pointed to by the foreign key.
func (o *Chebi) SubclassOfChebi(mods ...qm.QueryMod) chebiQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"accession\" = ?", o.SubclassOf),
	}

	queryMods = append(queryMods, mods...)

	return Chebis(queryMods...)
}

// SubclassOfChebis retrieves all the chebi's Chebis with an executor via subclass_of column.
func (o *Chebi) SubclassOfChebis(mods ...qm.QueryMod) chebiQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"chebi\".\"subclass_of\"=?", o.Accession),
	)

	return Chebis(queryMods...)
}

// Compounds retrieves all the compound's Compounds with an executor.
func (o *Chebi) Compounds(mods ...qm.QueryMod) compoundQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"compound\".\"chebi\"=?", o.Accession),
	)

	return Compounds(queryMods...)
}

// LoadSubclassOfChebi allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (chebiL) LoadSubclassOfChebi(ctx context.Context, e boil.ContextExecutor, singular bool, maybeChebi interface{}, mods queries.Applicator) error {
	var slice []*Chebi
	var object *Chebi

	if singular {
		object = maybeChebi.(*Chebi)
	} else {
		slice = *maybeChebi.(*[]*Chebi)
	}

	// Gather the distinct, non-null foreign-key values to query for.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &chebiR{}
		}
		if !queries.IsNil(object.SubclassOf) {
			args = append(args, object.SubclassOf)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &chebiR{}
			}

			// O(n^2) dedupe over collected args; generated as-is by SQLBoiler.
			for _, a := range args {
				if queries.Equal(a, obj.SubclassOf) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.SubclassOf) {
				args = append(args, obj.SubclassOf)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`chebi`),
		qm.WhereIn(`chebi.accession in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load Chebi")
	}

	var resultSlice []*Chebi
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice Chebi")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for chebi")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for chebi")
	}

	if len(chebiAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.SubclassOfChebi = foreign
		if foreign.R == nil {
			foreign.R = &chebiR{}
		}
		// Link both directions of the relationship.
		foreign.R.SubclassOfChebis = append(foreign.R.SubclassOfChebis, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.SubclassOf, foreign.Accession) {
				local.R.SubclassOfChebi = foreign
				if foreign.R == nil {
					foreign.R = &chebiR{}
				}
				foreign.R.SubclassOfChebis = append(foreign.R.SubclassOfChebis, local)
				break
			}
		}
	}

	return nil
}

// LoadSubclassOfChebis allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (chebiL) LoadSubclassOfChebis(ctx context.Context, e boil.ContextExecutor, singular bool, maybeChebi interface{}, mods queries.Applicator) error {
	var slice []*Chebi
	var object *Chebi

	if singular {
		object = maybeChebi.(*Chebi)
	} else {
		slice = *maybeChebi.(*[]*Chebi)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &chebiR{}
		}
		args = append(args, object.Accession)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &chebiR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.Accession) {
					continue Outer
				}
			}

			args = append(args, obj.Accession)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`chebi`),
		qm.WhereIn(`chebi.subclass_of in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load chebi")
	}

	var resultSlice []*Chebi
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice chebi")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on chebi")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for chebi")
	}

	if len(chebiAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.SubclassOfChebis = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &chebiR{}
			}
			foreign.R.SubclassOfChebi = object
		}
		return nil
	}

	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.Accession, foreign.SubclassOf) {
				local.R.SubclassOfChebis = append(local.R.SubclassOfChebis, foreign)
				if foreign.R == nil {
					foreign.R = &chebiR{}
				}
				foreign.R.SubclassOfChebi = local
				break
			}
		}
	}

	return nil
}

// LoadCompounds allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (chebiL) LoadCompounds(ctx context.Context, e boil.ContextExecutor, singular bool, maybeChebi interface{}, mods queries.Applicator) error {
	var slice []*Chebi
	var object *Chebi

	if singular {
		object = maybeChebi.(*Chebi)
	} else {
		slice = *maybeChebi.(*[]*Chebi)
	}

	// Collect distinct accession values to match against compound.chebi.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &chebiR{}
		}
		args = append(args, object.Accession)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &chebiR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.Accession) {
					continue Outer
				}
			}

			args = append(args, obj.Accession)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`compound`),
		qm.WhereIn(`compound.chebi in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load compound")
	}

	var resultSlice []*Compound
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice compound")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on compound")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound")
	}

	if len(compoundAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.Compounds = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &compoundR{}
			}
			foreign.R.CompoundChebi = object
		}
		return nil
	}

	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.Accession, foreign.Chebi) {
				local.R.Compounds = append(local.R.Compounds, foreign)
				if foreign.R == nil {
					foreign.R = &compoundR{}
				}
				foreign.R.CompoundChebi = local
				break
			}
		}
	}

	return nil
}

// SetSubclassOfChebi of the chebi to the related item.
// Sets o.R.SubclassOfChebi to related.
// Adds o to related.R.SubclassOfChebis.
func (o *Chebi) SetSubclassOfChebi(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Chebi) error {
	var err error
	// Optionally insert the related row first so its key exists.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"chebi\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"subclass_of"}),
		strmangle.WhereClause("\"", "\"", 0, chebiPrimaryKeyColumns),
	)
	values := []interface{}{related.Accession, o.Accession}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the database change into the in-memory structs (both directions).
	queries.Assign(&o.SubclassOf, related.Accession)
	if o.R == nil {
		o.R = &chebiR{
			SubclassOfChebi: related,
		}
	} else {
		o.R.SubclassOfChebi = related
	}

	if related.R == nil {
		related.R = &chebiR{
			SubclassOfChebis: ChebiSlice{o},
		}
	} else {
		related.R.SubclassOfChebis = append(related.R.SubclassOfChebis, o)
	}

	return nil
}

// RemoveSubclassOfChebi relationship.
// Sets o.R.SubclassOfChebi to nil.
// Removes o from all passed in related items' relationships struct.
func (o *Chebi) RemoveSubclassOfChebi(ctx context.Context, exec boil.ContextExecutor, related *Chebi) error {
	var err error

	// Null the FK column and persist only that column.
	queries.SetScanner(&o.SubclassOf, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("subclass_of")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.SubclassOfChebi = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Swap-remove o from related's reverse slice (order is not preserved).
	for i, ri := range related.R.SubclassOfChebis {
		if queries.Equal(o.SubclassOf, ri.SubclassOf) {
			continue
		}

		ln := len(related.R.SubclassOfChebis)
		if ln > 1 && i < ln-1 {
			related.R.SubclassOfChebis[i] = related.R.SubclassOfChebis[ln-1]
		}
		related.R.SubclassOfChebis = related.R.SubclassOfChebis[:ln-1]
		break
	}
	return nil
}

// AddSubclassOfChebis adds the given related objects to the existing relationships
// of the chebi, optionally inserting them as new records.
// Appends related to o.R.SubclassOfChebis.
// Sets related.R.SubclassOfChebi appropriately.
func (o *Chebi) AddSubclassOfChebis(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Chebi) error {
	var err error
	for _, rel := range related {
		if insert {
			// Assign the FK before insert so the new row is created already linked.
			queries.Assign(&rel.SubclassOf, o.Accession)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			updateQuery := fmt.Sprintf(
				"UPDATE \"chebi\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"subclass_of"}),
				strmangle.WhereClause("\"", "\"", 0, chebiPrimaryKeyColumns),
			)
			values := []interface{}{o.Accession, rel.Accession}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.SubclassOf, o.Accession)
		}
	}

	// Keep the in-memory relationship caches consistent in both directions.
	if o.R == nil {
		o.R = &chebiR{
			SubclassOfChebis: related,
		}
	} else {
		o.R.SubclassOfChebis = append(o.R.SubclassOfChebis, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &chebiR{
				SubclassOfChebi: o,
			}
		} else {
			rel.R.SubclassOfChebi = o
		}
	}
	return nil
}

// SetSubclassOfChebis removes all previously related items of the
// chebi replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.SubclassOfChebi's SubclassOfChebis accordingly.
// Replaces o.R.SubclassOfChebis with related.
// Sets related.R.SubclassOfChebi's SubclassOfChebis accordingly.
func (o *Chebi) SetSubclassOfChebis(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Chebi) error {
	// Detach all current children first, then delegate to AddSubclassOfChebis.
	query := "update \"chebi\" set \"subclass_of\" = null where \"subclass_of\" = ?"
	values := []interface{}{o.Accession}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	if o.R != nil {
		for _, rel := range o.R.SubclassOfChebis {
			queries.SetScanner(&rel.SubclassOf, nil)
			if rel.R == nil {
				continue
			}

			rel.R.SubclassOfChebi = nil
		}
		o.R.SubclassOfChebis = nil
	}

	return o.AddSubclassOfChebis(ctx, exec, insert, related...)
}

// RemoveSubclassOfChebis relationships from objects passed in.
// Removes related items from R.SubclassOfChebis (uses pointer comparison, removal does not keep order)
// Sets related.R.SubclassOfChebi.
func (o *Chebi) RemoveSubclassOfChebis(ctx context.Context, exec boil.ContextExecutor, related ...*Chebi) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		queries.SetScanner(&rel.SubclassOf, nil)
		if rel.R != nil {
			rel.R.SubclassOfChebi = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("subclass_of")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	for _, rel := range related {
		for i, ri := range o.R.SubclassOfChebis {
			if rel != ri {
				continue
			}

			// Swap-remove; slice order is not preserved.
			ln := len(o.R.SubclassOfChebis)
			if ln > 1 && i < ln-1 {
				o.R.SubclassOfChebis[i] = o.R.SubclassOfChebis[ln-1]
			}
			o.R.SubclassOfChebis = o.R.SubclassOfChebis[:ln-1]
			break
		}
	}

	return nil
}

// AddCompounds adds the given related objects to the existing relationships
// of the chebi, optionally inserting them as new records.
// Appends related to o.R.Compounds.
// Sets related.R.CompoundChebi appropriately.
func (o *Chebi) AddCompounds(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Compound) error {
	var err error
	for _, rel := range related {
		if insert {
			queries.Assign(&rel.Chebi, o.Accession)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			updateQuery := fmt.Sprintf(
				"UPDATE \"compound\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"chebi"}),
				strmangle.WhereClause("\"", "\"", 0, compoundPrimaryKeyColumns),
			)
			// WHERE binds the compound's primary key; presumably compound's PK
			// column is also named accession — confirm against compound.go.
			values := []interface{}{o.Accession, rel.Accession}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.Chebi, o.Accession)
		}
	}

	if o.R == nil {
		o.R = &chebiR{
			Compounds: related,
		}
	} else {
		o.R.Compounds = append(o.R.Compounds, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &compoundR{
				CompoundChebi: o,
			}
		} else {
			rel.R.CompoundChebi = o
		}
	}
	return nil
}

// SetCompounds removes all previously related items of the
// chebi replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.CompoundChebi's Compounds accordingly.
// Replaces o.R.Compounds with related.
// Sets related.R.CompoundChebi's Compounds accordingly.
func (o *Chebi) SetCompounds(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Compound) error {
	// Detach all currently linked compounds, then delegate to AddCompounds.
	query := "update \"compound\" set \"chebi\" = null where \"chebi\" = ?"
	values := []interface{}{o.Accession}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	if o.R != nil {
		for _, rel := range o.R.Compounds {
			queries.SetScanner(&rel.Chebi, nil)
			if rel.R == nil {
				continue
			}

			rel.R.CompoundChebi = nil
		}
		o.R.Compounds = nil
	}

	return o.AddCompounds(ctx, exec, insert, related...)
}

// RemoveCompounds relationships from objects passed in.
// Removes related items from R.Compounds (uses pointer comparison, removal does not keep order)
// Sets related.R.CompoundChebi.
func (o *Chebi) RemoveCompounds(ctx context.Context, exec boil.ContextExecutor, related ...*Compound) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		queries.SetScanner(&rel.Chebi, nil)
		if rel.R != nil {
			rel.R.CompoundChebi = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("chebi")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	for _, rel := range related {
		for i, ri := range o.R.Compounds {
			if rel != ri {
				continue
			}

			// Swap-remove; slice order is not preserved.
			ln := len(o.R.Compounds)
			if ln > 1 && i < ln-1 {
				o.R.Compounds[i] = o.R.Compounds[ln-1]
			}
			o.R.Compounds = o.R.Compounds[:ln-1]
			break
		}
	}

	return nil
}

// Chebis retrieves all the records using an executor.
func Chebis(mods ...qm.QueryMod) chebiQuery {
	mods = append(mods, qm.From("\"chebi\""))
	q := NewQuery(mods...)
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"chebi\".*"})
	}

	return chebiQuery{q}
}

// FindChebi retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindChebi(ctx context.Context, exec boil.ContextExecutor, accession null.String, selectCols ...string) (*Chebi, error) {
	chebiObj := &Chebi{}

	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"chebi\" where \"accession\"=?", sel,
	)

	q := queries.Raw(query, accession)

	err := q.Bind(ctx, exec, chebiObj)
	if err != nil {
		// sql.ErrNoRows is passed through unwrapped so callers can test for it.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from chebi")
	}

	if err = chebiObj.doAfterSelectHooks(ctx, exec); err != nil {
		return chebiObj, err
	}

	return chebiObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *Chebi) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no chebi provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(chebiColumnsWithDefault, o)

	// The built INSERT statement is cached per (column set, non-zero defaults)
	// so repeated inserts skip query construction.
	key := makeCacheKey(columns, nzDefaults)
	chebiInsertCacheMut.RLock()
	cache, cached := chebiInsertCache[key]
	chebiInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			chebiAllColumns,
			chebiColumnsWithDefault,
			chebiColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(chebiType, chebiMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(chebiType, chebiMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"chebi\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			// Every column defaulted: insert a row of defaults.
			cache.query = "INSERT INTO \"chebi\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	if len(cache.retMapping) != 0 {
		// RETURNING present: scan database-generated values back into o.
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into chebi")
	}

	if !cached {
		chebiInsertCacheMut.Lock()
		chebiInsertCache[key] = cache
		chebiInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the Chebi.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *Chebi) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	// Built UPDATE statements are cached per column set.
	key := makeCacheKey(columns, nil)
	chebiUpdateCacheMut.RLock()
	cache, cached := chebiUpdateCache[key]
	chebiUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			chebiAllColumns,
			chebiPrimaryKeyColumns,
		)

		if !columns.IsWhitelist() {
			// created_at is never updated unless explicitly whitelisted.
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update chebi, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"chebi\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, chebiPrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(chebiType, chebiMapping, append(wl, chebiPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update chebi row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for chebi")
	}

	if !cached {
		chebiUpdateCacheMut.Lock()
		chebiUpdateCache[key] = cache
		chebiUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
+func (q chebiQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for chebi") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for chebi") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ChebiSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), chebiPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"chebi\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, chebiPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in chebi slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all chebi") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. 
+// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Chebi) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no chebi provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(chebiColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + chebiUpsertCacheMut.RLock() + cache, cached := chebiUpsertCache[key] + chebiUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + chebiAllColumns, + chebiColumnsWithDefault, + chebiColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + chebiAllColumns, + chebiPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert chebi, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(chebiPrimaryKeyColumns)) + copy(conflict, chebiPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"chebi\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = 
queries.BindMapping(chebiType, chebiMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(chebiType, chebiMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert chebi") + } + + if !cached { + chebiUpsertCacheMut.Lock() + chebiUpsertCache[key] = cache + chebiUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Chebi record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *Chebi) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no Chebi provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), chebiPrimaryKeyMapping) + sql := "DELETE FROM \"chebi\" WHERE \"accession\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from chebi") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for chebi") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q chebiQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no chebiQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from chebi") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for chebi") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o ChebiSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(chebiBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), chebiPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"chebi\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, chebiPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from chebi slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for chebi") + } + + if len(chebiAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *Chebi) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindChebi(ctx, exec, o.Accession) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *ChebiSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ChebiSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), chebiPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"chebi\".* FROM \"chebi\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, chebiPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ChebiSlice") + } + + *o = slice + + return nil +} + +// ChebiExists checks if the Chebi row exists. +func ChebiExists(ctx context.Context, exec boil.ContextExecutor, accession null.String) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"chebi\" where \"accession\"=? 
limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, accession) + } + row := exec.QueryRowContext(ctx, sql, accession) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if chebi exists") + } + + return exists, nil +} diff --git a/models/chembl_id_lookup.go b/models/chembl_id_lookup.go new file mode 100644 index 0000000..ec3b40e --- /dev/null +++ b/models/chembl_id_lookup.go @@ -0,0 +1,1968 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// ChemblIDLookup is an object representing the database table. 
+type ChemblIDLookup struct { + ChemblID string `boil:"chembl_id" json:"chembl_id" toml:"chembl_id" yaml:"chembl_id"` + EntityType string `boil:"entity_type" json:"entity_type" toml:"entity_type" yaml:"entity_type"` + EntityID int64 `boil:"entity_id" json:"entity_id" toml:"entity_id" yaml:"entity_id"` + Status string `boil:"status" json:"status" toml:"status" yaml:"status"` + LastActive null.Int64 `boil:"last_active" json:"last_active,omitempty" toml:"last_active" yaml:"last_active,omitempty"` + + R *chemblIDLookupR `boil:"-" json:"-" toml:"-" yaml:"-"` + L chemblIDLookupL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ChemblIDLookupColumns = struct { + ChemblID string + EntityType string + EntityID string + Status string + LastActive string +}{ + ChemblID: "chembl_id", + EntityType: "entity_type", + EntityID: "entity_id", + Status: "status", + LastActive: "last_active", +} + +var ChemblIDLookupTableColumns = struct { + ChemblID string + EntityType string + EntityID string + Status string + LastActive string +}{ + ChemblID: "chembl_id_lookup.chembl_id", + EntityType: "chembl_id_lookup.entity_type", + EntityID: "chembl_id_lookup.entity_id", + Status: "chembl_id_lookup.status", + LastActive: "chembl_id_lookup.last_active", +} + +// Generated where + +var ChemblIDLookupWhere = struct { + ChemblID whereHelperstring + EntityType whereHelperstring + EntityID whereHelperint64 + Status whereHelperstring + LastActive whereHelpernull_Int64 +}{ + ChemblID: whereHelperstring{field: "\"chembl_id_lookup\".\"chembl_id\""}, + EntityType: whereHelperstring{field: "\"chembl_id_lookup\".\"entity_type\""}, + EntityID: whereHelperint64{field: "\"chembl_id_lookup\".\"entity_id\""}, + Status: whereHelperstring{field: "\"chembl_id_lookup\".\"status\""}, + LastActive: whereHelpernull_Int64{field: "\"chembl_id_lookup\".\"last_active\""}, +} + +// ChemblIDLookupRels is where relationship names are stored. 
+var ChemblIDLookupRels = struct { + ChemblAssay string + ChemblCellDictionary string + ChemblDoc string + ChemblMoleculeDictionary string + ChemblTargetDictionary string + ChemblTissueDictionary string +}{ + ChemblAssay: "ChemblAssay", + ChemblCellDictionary: "ChemblCellDictionary", + ChemblDoc: "ChemblDoc", + ChemblMoleculeDictionary: "ChemblMoleculeDictionary", + ChemblTargetDictionary: "ChemblTargetDictionary", + ChemblTissueDictionary: "ChemblTissueDictionary", +} + +// chemblIDLookupR is where relationships are stored. +type chemblIDLookupR struct { + ChemblAssay *Assay `boil:"ChemblAssay" json:"ChemblAssay" toml:"ChemblAssay" yaml:"ChemblAssay"` + ChemblCellDictionary *CellDictionary `boil:"ChemblCellDictionary" json:"ChemblCellDictionary" toml:"ChemblCellDictionary" yaml:"ChemblCellDictionary"` + ChemblDoc *Doc `boil:"ChemblDoc" json:"ChemblDoc" toml:"ChemblDoc" yaml:"ChemblDoc"` + ChemblMoleculeDictionary *MoleculeDictionary `boil:"ChemblMoleculeDictionary" json:"ChemblMoleculeDictionary" toml:"ChemblMoleculeDictionary" yaml:"ChemblMoleculeDictionary"` + ChemblTargetDictionary *TargetDictionary `boil:"ChemblTargetDictionary" json:"ChemblTargetDictionary" toml:"ChemblTargetDictionary" yaml:"ChemblTargetDictionary"` + ChemblTissueDictionary *TissueDictionary `boil:"ChemblTissueDictionary" json:"ChemblTissueDictionary" toml:"ChemblTissueDictionary" yaml:"ChemblTissueDictionary"` +} + +// NewStruct creates a new relationship struct +func (*chemblIDLookupR) NewStruct() *chemblIDLookupR { + return &chemblIDLookupR{} +} + +func (r *chemblIDLookupR) GetChemblAssay() *Assay { + if r == nil { + return nil + } + return r.ChemblAssay +} + +func (r *chemblIDLookupR) GetChemblCellDictionary() *CellDictionary { + if r == nil { + return nil + } + return r.ChemblCellDictionary +} + +func (r *chemblIDLookupR) GetChemblDoc() *Doc { + if r == nil { + return nil + } + return r.ChemblDoc +} + +func (r *chemblIDLookupR) GetChemblMoleculeDictionary() *MoleculeDictionary { + if r 
== nil { + return nil + } + return r.ChemblMoleculeDictionary +} + +func (r *chemblIDLookupR) GetChemblTargetDictionary() *TargetDictionary { + if r == nil { + return nil + } + return r.ChemblTargetDictionary +} + +func (r *chemblIDLookupR) GetChemblTissueDictionary() *TissueDictionary { + if r == nil { + return nil + } + return r.ChemblTissueDictionary +} + +// chemblIDLookupL is where Load methods for each relationship are stored. +type chemblIDLookupL struct{} + +var ( + chemblIDLookupAllColumns = []string{"chembl_id", "entity_type", "entity_id", "status", "last_active"} + chemblIDLookupColumnsWithoutDefault = []string{"chembl_id", "entity_type", "entity_id", "status"} + chemblIDLookupColumnsWithDefault = []string{"last_active"} + chemblIDLookupPrimaryKeyColumns = []string{"chembl_id"} + chemblIDLookupGeneratedColumns = []string{} +) + +type ( + // ChemblIDLookupSlice is an alias for a slice of pointers to ChemblIDLookup. + // This should almost always be used instead of []ChemblIDLookup. + ChemblIDLookupSlice []*ChemblIDLookup + // ChemblIDLookupHook is the signature for custom ChemblIDLookup hook methods + ChemblIDLookupHook func(context.Context, boil.ContextExecutor, *ChemblIDLookup) error + + chemblIDLookupQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + chemblIDLookupType = reflect.TypeOf(&ChemblIDLookup{}) + chemblIDLookupMapping = queries.MakeStructMapping(chemblIDLookupType) + chemblIDLookupPrimaryKeyMapping, _ = queries.BindMapping(chemblIDLookupType, chemblIDLookupMapping, chemblIDLookupPrimaryKeyColumns) + chemblIDLookupInsertCacheMut sync.RWMutex + chemblIDLookupInsertCache = make(map[string]insertCache) + chemblIDLookupUpdateCacheMut sync.RWMutex + chemblIDLookupUpdateCache = make(map[string]updateCache) + chemblIDLookupUpsertCacheMut sync.RWMutex + chemblIDLookupUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var chemblIDLookupAfterSelectHooks []ChemblIDLookupHook + +var chemblIDLookupBeforeInsertHooks []ChemblIDLookupHook +var chemblIDLookupAfterInsertHooks []ChemblIDLookupHook + +var chemblIDLookupBeforeUpdateHooks []ChemblIDLookupHook +var chemblIDLookupAfterUpdateHooks []ChemblIDLookupHook + +var chemblIDLookupBeforeDeleteHooks []ChemblIDLookupHook +var chemblIDLookupAfterDeleteHooks []ChemblIDLookupHook + +var chemblIDLookupBeforeUpsertHooks []ChemblIDLookupHook +var chemblIDLookupAfterUpsertHooks []ChemblIDLookupHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *ChemblIDLookup) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range chemblIDLookupAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *ChemblIDLookup) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range chemblIDLookupBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *ChemblIDLookup) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range chemblIDLookupAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *ChemblIDLookup) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range chemblIDLookupBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ChemblIDLookup) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range chemblIDLookupAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *ChemblIDLookup) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range chemblIDLookupBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *ChemblIDLookup) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range chemblIDLookupAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ChemblIDLookup) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range chemblIDLookupBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *ChemblIDLookup) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range chemblIDLookupAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddChemblIDLookupHook registers your hook function for all future operations. +func AddChemblIDLookupHook(hookPoint boil.HookPoint, chemblIDLookupHook ChemblIDLookupHook) { + switch hookPoint { + case boil.AfterSelectHook: + chemblIDLookupAfterSelectHooks = append(chemblIDLookupAfterSelectHooks, chemblIDLookupHook) + case boil.BeforeInsertHook: + chemblIDLookupBeforeInsertHooks = append(chemblIDLookupBeforeInsertHooks, chemblIDLookupHook) + case boil.AfterInsertHook: + chemblIDLookupAfterInsertHooks = append(chemblIDLookupAfterInsertHooks, chemblIDLookupHook) + case boil.BeforeUpdateHook: + chemblIDLookupBeforeUpdateHooks = append(chemblIDLookupBeforeUpdateHooks, chemblIDLookupHook) + case boil.AfterUpdateHook: + chemblIDLookupAfterUpdateHooks = append(chemblIDLookupAfterUpdateHooks, chemblIDLookupHook) + case boil.BeforeDeleteHook: + chemblIDLookupBeforeDeleteHooks = append(chemblIDLookupBeforeDeleteHooks, chemblIDLookupHook) + case boil.AfterDeleteHook: + chemblIDLookupAfterDeleteHooks = append(chemblIDLookupAfterDeleteHooks, chemblIDLookupHook) + case boil.BeforeUpsertHook: + chemblIDLookupBeforeUpsertHooks = append(chemblIDLookupBeforeUpsertHooks, chemblIDLookupHook) + case boil.AfterUpsertHook: + chemblIDLookupAfterUpsertHooks = append(chemblIDLookupAfterUpsertHooks, chemblIDLookupHook) + } +} + +// One returns a single chemblIDLookup record from the query. 
+func (q chemblIDLookupQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ChemblIDLookup, error) { + o := &ChemblIDLookup{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for chembl_id_lookup") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ChemblIDLookup records from the query. +func (q chemblIDLookupQuery) All(ctx context.Context, exec boil.ContextExecutor) (ChemblIDLookupSlice, error) { + var o []*ChemblIDLookup + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ChemblIDLookup slice") + } + + if len(chemblIDLookupAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ChemblIDLookup records in the query. +func (q chemblIDLookupQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count chembl_id_lookup rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q chemblIDLookupQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if chembl_id_lookup exists") + } + + return count > 0, nil +} + +// ChemblAssay pointed to by the foreign key. 
+func (o *ChemblIDLookup) ChemblAssay(mods ...qm.QueryMod) assayQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"chembl_id\" = ?", o.ChemblID), + } + + queryMods = append(queryMods, mods...) + + return Assays(queryMods...) +} + +// ChemblCellDictionary pointed to by the foreign key. +func (o *ChemblIDLookup) ChemblCellDictionary(mods ...qm.QueryMod) cellDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"chembl_id\" = ?", o.ChemblID), + } + + queryMods = append(queryMods, mods...) + + return CellDictionaries(queryMods...) +} + +// ChemblDoc pointed to by the foreign key. +func (o *ChemblIDLookup) ChemblDoc(mods ...qm.QueryMod) docQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"chembl_id\" = ?", o.ChemblID), + } + + queryMods = append(queryMods, mods...) + + return Docs(queryMods...) +} + +// ChemblMoleculeDictionary pointed to by the foreign key. +func (o *ChemblIDLookup) ChemblMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"chembl_id\" = ?", o.ChemblID), + } + + queryMods = append(queryMods, mods...) + + return MoleculeDictionaries(queryMods...) +} + +// ChemblTargetDictionary pointed to by the foreign key. +func (o *ChemblIDLookup) ChemblTargetDictionary(mods ...qm.QueryMod) targetDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"chembl_id\" = ?", o.ChemblID), + } + + queryMods = append(queryMods, mods...) + + return TargetDictionaries(queryMods...) +} + +// ChemblTissueDictionary pointed to by the foreign key. +func (o *ChemblIDLookup) ChemblTissueDictionary(mods ...qm.QueryMod) tissueDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"chembl_id\" = ?", o.ChemblID), + } + + queryMods = append(queryMods, mods...) + + return TissueDictionaries(queryMods...) +} + +// LoadChemblAssay allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-1 relationship. 
+func (chemblIDLookupL) LoadChemblAssay(ctx context.Context, e boil.ContextExecutor, singular bool, maybeChemblIDLookup interface{}, mods queries.Applicator) error { + var slice []*ChemblIDLookup + var object *ChemblIDLookup + + if singular { + object = maybeChemblIDLookup.(*ChemblIDLookup) + } else { + slice = *maybeChemblIDLookup.(*[]*ChemblIDLookup) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &chemblIDLookupR{} + } + args = append(args, object.ChemblID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &chemblIDLookupR{} + } + + for _, a := range args { + if a == obj.ChemblID { + continue Outer + } + } + + args = append(args, obj.ChemblID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assays`), + qm.WhereIn(`assays.chembl_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Assay") + } + + var resultSlice []*Assay + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Assay") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for assays") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays") + } + + if len(chemblIDLookupAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.ChemblAssay = foreign + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.Chembl = object + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ChemblID == foreign.ChemblID { + local.R.ChemblAssay = foreign 
+ if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.Chembl = local + break + } + } + } + + return nil +} + +// LoadChemblCellDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-1 relationship. +func (chemblIDLookupL) LoadChemblCellDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeChemblIDLookup interface{}, mods queries.Applicator) error { + var slice []*ChemblIDLookup + var object *ChemblIDLookup + + if singular { + object = maybeChemblIDLookup.(*ChemblIDLookup) + } else { + slice = *maybeChemblIDLookup.(*[]*ChemblIDLookup) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &chemblIDLookupR{} + } + args = append(args, object.ChemblID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &chemblIDLookupR{} + } + + for _, a := range args { + if queries.Equal(a, obj.ChemblID) { + continue Outer + } + } + + args = append(args, obj.ChemblID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`cell_dictionary`), + qm.WhereIn(`cell_dictionary.chembl_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load CellDictionary") + } + + var resultSlice []*CellDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice CellDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for cell_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for cell_dictionary") + } + + if len(chemblIDLookupAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if 
len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.ChemblCellDictionary = foreign + if foreign.R == nil { + foreign.R = &cellDictionaryR{} + } + foreign.R.Chembl = object + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.ChemblID, foreign.ChemblID) { + local.R.ChemblCellDictionary = foreign + if foreign.R == nil { + foreign.R = &cellDictionaryR{} + } + foreign.R.Chembl = local + break + } + } + } + + return nil +} + +// LoadChemblDoc allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-1 relationship. +func (chemblIDLookupL) LoadChemblDoc(ctx context.Context, e boil.ContextExecutor, singular bool, maybeChemblIDLookup interface{}, mods queries.Applicator) error { + var slice []*ChemblIDLookup + var object *ChemblIDLookup + + if singular { + object = maybeChemblIDLookup.(*ChemblIDLookup) + } else { + slice = *maybeChemblIDLookup.(*[]*ChemblIDLookup) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &chemblIDLookupR{} + } + args = append(args, object.ChemblID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &chemblIDLookupR{} + } + + for _, a := range args { + if a == obj.ChemblID { + continue Outer + } + } + + args = append(args, obj.ChemblID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`docs`), + qm.WhereIn(`docs.chembl_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Doc") + } + + var resultSlice []*Doc + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Doc") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for docs") + } + if err = results.Err(); 
err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for docs") + } + + if len(chemblIDLookupAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.ChemblDoc = foreign + if foreign.R == nil { + foreign.R = &docR{} + } + foreign.R.Chembl = object + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ChemblID == foreign.ChemblID { + local.R.ChemblDoc = foreign + if foreign.R == nil { + foreign.R = &docR{} + } + foreign.R.Chembl = local + break + } + } + } + + return nil +} + +// LoadChemblMoleculeDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-1 relationship. +func (chemblIDLookupL) LoadChemblMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeChemblIDLookup interface{}, mods queries.Applicator) error { + var slice []*ChemblIDLookup + var object *ChemblIDLookup + + if singular { + object = maybeChemblIDLookup.(*ChemblIDLookup) + } else { + slice = *maybeChemblIDLookup.(*[]*ChemblIDLookup) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &chemblIDLookupR{} + } + args = append(args, object.ChemblID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &chemblIDLookupR{} + } + + for _, a := range args { + if a == obj.ChemblID { + continue Outer + } + } + + args = append(args, obj.ChemblID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`molecule_dictionary`), + qm.WhereIn(`molecule_dictionary.chembl_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load MoleculeDictionary") + } + 
+ var resultSlice []*MoleculeDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary") + } + + if len(chemblIDLookupAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.ChemblMoleculeDictionary = foreign + if foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.Chembl = object + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ChemblID == foreign.ChemblID { + local.R.ChemblMoleculeDictionary = foreign + if foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.Chembl = local + break + } + } + } + + return nil +} + +// LoadChemblTargetDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-1 relationship. 
+func (chemblIDLookupL) LoadChemblTargetDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeChemblIDLookup interface{}, mods queries.Applicator) error { + var slice []*ChemblIDLookup + var object *ChemblIDLookup + + if singular { + object = maybeChemblIDLookup.(*ChemblIDLookup) + } else { + slice = *maybeChemblIDLookup.(*[]*ChemblIDLookup) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &chemblIDLookupR{} + } + args = append(args, object.ChemblID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &chemblIDLookupR{} + } + + for _, a := range args { + if a == obj.ChemblID { + continue Outer + } + } + + args = append(args, obj.ChemblID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`target_dictionary`), + qm.WhereIn(`target_dictionary.chembl_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load TargetDictionary") + } + + var resultSlice []*TargetDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice TargetDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for target_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for target_dictionary") + } + + if len(chemblIDLookupAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.ChemblTargetDictionary = foreign + if foreign.R == nil { + foreign.R = &targetDictionaryR{} + } + foreign.R.Chembl = object + } + + for _, local := range slice { + 
for _, foreign := range resultSlice { + if local.ChemblID == foreign.ChemblID { + local.R.ChemblTargetDictionary = foreign + if foreign.R == nil { + foreign.R = &targetDictionaryR{} + } + foreign.R.Chembl = local + break + } + } + } + + return nil +} + +// LoadChemblTissueDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-1 relationship. +func (chemblIDLookupL) LoadChemblTissueDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeChemblIDLookup interface{}, mods queries.Applicator) error { + var slice []*ChemblIDLookup + var object *ChemblIDLookup + + if singular { + object = maybeChemblIDLookup.(*ChemblIDLookup) + } else { + slice = *maybeChemblIDLookup.(*[]*ChemblIDLookup) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &chemblIDLookupR{} + } + args = append(args, object.ChemblID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &chemblIDLookupR{} + } + + for _, a := range args { + if a == obj.ChemblID { + continue Outer + } + } + + args = append(args, obj.ChemblID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`tissue_dictionary`), + qm.WhereIn(`tissue_dictionary.chembl_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load TissueDictionary") + } + + var resultSlice []*TissueDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice TissueDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for tissue_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for tissue_dictionary") + } + + if 
len(chemblIDLookupAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.ChemblTissueDictionary = foreign + if foreign.R == nil { + foreign.R = &tissueDictionaryR{} + } + foreign.R.Chembl = object + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ChemblID == foreign.ChemblID { + local.R.ChemblTissueDictionary = foreign + if foreign.R == nil { + foreign.R = &tissueDictionaryR{} + } + foreign.R.Chembl = local + break + } + } + } + + return nil +} + +// SetChemblAssay of the chemblIDLookup to the related item. +// Sets o.R.ChemblAssay to related. +// Adds o to related.R.Chembl. +func (o *ChemblIDLookup) SetChemblAssay(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Assay) error { + var err error + + if insert { + related.ChemblID = o.ChemblID + + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"chembl_id"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{o.ChemblID, related.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + related.ChemblID = o.ChemblID + } + + if o.R == nil { + o.R = &chemblIDLookupR{ + ChemblAssay: related, + } + } else { + o.R.ChemblAssay = related + } + + if related.R == nil { + related.R = &assayR{ + Chembl: o, + } + } else { + related.R.Chembl = o + } + return nil +} + +// SetChemblCellDictionary of the 
chemblIDLookup to the related item. +// Sets o.R.ChemblCellDictionary to related. +// Adds o to related.R.Chembl. +func (o *ChemblIDLookup) SetChemblCellDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *CellDictionary) error { + var err error + + if insert { + queries.Assign(&related.ChemblID, o.ChemblID) + + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"cell_dictionary\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"chembl_id"}), + strmangle.WhereClause("\"", "\"", 0, cellDictionaryPrimaryKeyColumns), + ) + values := []interface{}{o.ChemblID, related.CellID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&related.ChemblID, o.ChemblID) + } + + if o.R == nil { + o.R = &chemblIDLookupR{ + ChemblCellDictionary: related, + } + } else { + o.R.ChemblCellDictionary = related + } + + if related.R == nil { + related.R = &cellDictionaryR{ + Chembl: o, + } + } else { + related.R.Chembl = o + } + return nil +} + +// RemoveChemblCellDictionary relationship. +// Sets o.R.ChemblCellDictionary to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *ChemblIDLookup) RemoveChemblCellDictionary(ctx context.Context, exec boil.ContextExecutor, related *CellDictionary) error { + var err error + + queries.SetScanner(&related.ChemblID, nil) + if _, err = related.Update(ctx, exec, boil.Whitelist("chembl_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.ChemblCellDictionary = nil + } + + if related == nil || related.R == nil { + return nil + } + + related.R.Chembl = nil + + return nil +} + +// SetChemblDoc of the chemblIDLookup to the related item. +// Sets o.R.ChemblDoc to related. +// Adds o to related.R.Chembl. +func (o *ChemblIDLookup) SetChemblDoc(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Doc) error { + var err error + + if insert { + related.ChemblID = o.ChemblID + + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"docs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"chembl_id"}), + strmangle.WhereClause("\"", "\"", 0, docPrimaryKeyColumns), + ) + values := []interface{}{o.ChemblID, related.DocID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + related.ChemblID = o.ChemblID + } + + if o.R == nil { + o.R = &chemblIDLookupR{ + ChemblDoc: related, + } + } else { + o.R.ChemblDoc = related + } + + if related.R == nil { + related.R = &docR{ + Chembl: o, + } + } else { + related.R.Chembl = o + } + return nil +} + +// SetChemblMoleculeDictionary of the chemblIDLookup to the related item. +// Sets o.R.ChemblMoleculeDictionary to related. +// Adds o to related.R.Chembl. 
+func (o *ChemblIDLookup) SetChemblMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error { + var err error + + if insert { + related.ChemblID = o.ChemblID + + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_dictionary\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"chembl_id"}), + strmangle.WhereClause("\"", "\"", 0, moleculeDictionaryPrimaryKeyColumns), + ) + values := []interface{}{o.ChemblID, related.Molregno} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + related.ChemblID = o.ChemblID + } + + if o.R == nil { + o.R = &chemblIDLookupR{ + ChemblMoleculeDictionary: related, + } + } else { + o.R.ChemblMoleculeDictionary = related + } + + if related.R == nil { + related.R = &moleculeDictionaryR{ + Chembl: o, + } + } else { + related.R.Chembl = o + } + return nil +} + +// SetChemblTargetDictionary of the chemblIDLookup to the related item. +// Sets o.R.ChemblTargetDictionary to related. +// Adds o to related.R.Chembl. 
+func (o *ChemblIDLookup) SetChemblTargetDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *TargetDictionary) error { + var err error + + if insert { + related.ChemblID = o.ChemblID + + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"target_dictionary\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"chembl_id"}), + strmangle.WhereClause("\"", "\"", 0, targetDictionaryPrimaryKeyColumns), + ) + values := []interface{}{o.ChemblID, related.Tid} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + related.ChemblID = o.ChemblID + } + + if o.R == nil { + o.R = &chemblIDLookupR{ + ChemblTargetDictionary: related, + } + } else { + o.R.ChemblTargetDictionary = related + } + + if related.R == nil { + related.R = &targetDictionaryR{ + Chembl: o, + } + } else { + related.R.Chembl = o + } + return nil +} + +// SetChemblTissueDictionary of the chemblIDLookup to the related item. +// Sets o.R.ChemblTissueDictionary to related. +// Adds o to related.R.Chembl. 
+func (o *ChemblIDLookup) SetChemblTissueDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *TissueDictionary) error { + var err error + + if insert { + related.ChemblID = o.ChemblID + + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"tissue_dictionary\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"chembl_id"}), + strmangle.WhereClause("\"", "\"", 0, tissueDictionaryPrimaryKeyColumns), + ) + values := []interface{}{o.ChemblID, related.TissueID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + related.ChemblID = o.ChemblID + } + + if o.R == nil { + o.R = &chemblIDLookupR{ + ChemblTissueDictionary: related, + } + } else { + o.R.ChemblTissueDictionary = related + } + + if related.R == nil { + related.R = &tissueDictionaryR{ + Chembl: o, + } + } else { + related.R.Chembl = o + } + return nil +} + +// ChemblIDLookups retrieves all the records using an executor. +func ChemblIDLookups(mods ...qm.QueryMod) chemblIDLookupQuery { + mods = append(mods, qm.From("\"chembl_id_lookup\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"chembl_id_lookup\".*"}) + } + + return chemblIDLookupQuery{q} +} + +// FindChemblIDLookup retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindChemblIDLookup(ctx context.Context, exec boil.ContextExecutor, chemblID string, selectCols ...string) (*ChemblIDLookup, error) { + chemblIDLookupObj := &ChemblIDLookup{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"chembl_id_lookup\" where \"chembl_id\"=?", sel, + ) + + q := queries.Raw(query, chemblID) + + err := q.Bind(ctx, exec, chemblIDLookupObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from chembl_id_lookup") + } + + if err = chemblIDLookupObj.doAfterSelectHooks(ctx, exec); err != nil { + return chemblIDLookupObj, err + } + + return chemblIDLookupObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *ChemblIDLookup) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no chembl_id_lookup provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(chemblIDLookupColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + chemblIDLookupInsertCacheMut.RLock() + cache, cached := chemblIDLookupInsertCache[key] + chemblIDLookupInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + chemblIDLookupAllColumns, + chemblIDLookupColumnsWithDefault, + chemblIDLookupColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(chemblIDLookupType, chemblIDLookupMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(chemblIDLookupType, chemblIDLookupMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { 
+ cache.query = fmt.Sprintf("INSERT INTO \"chembl_id_lookup\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"chembl_id_lookup\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into chembl_id_lookup") + } + + if !cached { + chemblIDLookupInsertCacheMut.Lock() + chemblIDLookupInsertCache[key] = cache + chemblIDLookupInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ChemblIDLookup. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *ChemblIDLookup) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + chemblIDLookupUpdateCacheMut.RLock() + cache, cached := chemblIDLookupUpdateCache[key] + chemblIDLookupUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + chemblIDLookupAllColumns, + chemblIDLookupPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update chembl_id_lookup, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"chembl_id_lookup\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, chemblIDLookupPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(chemblIDLookupType, chemblIDLookupMapping, append(wl, chemblIDLookupPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update chembl_id_lookup row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for chembl_id_lookup") + } + + if !cached { + chemblIDLookupUpdateCacheMut.Lock() + chemblIDLookupUpdateCache[key] = cache + chemblIDLookupUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q chemblIDLookupQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for chembl_id_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for chembl_id_lookup") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ChemblIDLookupSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), chemblIDLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"chembl_id_lookup\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, chemblIDLookupPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in chemblIDLookup slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all chemblIDLookup") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *ChemblIDLookup) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no chembl_id_lookup provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(chemblIDLookupColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + chemblIDLookupUpsertCacheMut.RLock() + cache, cached := chemblIDLookupUpsertCache[key] + chemblIDLookupUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + chemblIDLookupAllColumns, + chemblIDLookupColumnsWithDefault, + chemblIDLookupColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + chemblIDLookupAllColumns, + 
chemblIDLookupPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert chembl_id_lookup, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(chemblIDLookupPrimaryKeyColumns)) + copy(conflict, chemblIDLookupPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"chembl_id_lookup\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(chemblIDLookupType, chemblIDLookupMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(chemblIDLookupType, chemblIDLookupMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert chembl_id_lookup") + } + + if !cached { + chemblIDLookupUpsertCacheMut.Lock() + chemblIDLookupUpsertCache[key] = cache + chemblIDLookupUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ChemblIDLookup record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *ChemblIDLookup) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ChemblIDLookup provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), chemblIDLookupPrimaryKeyMapping) + sql := "DELETE FROM \"chembl_id_lookup\" WHERE \"chembl_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from chembl_id_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for chembl_id_lookup") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q chemblIDLookupQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no chemblIDLookupQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from chembl_id_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for chembl_id_lookup") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o ChemblIDLookupSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(chemblIDLookupBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), chemblIDLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"chembl_id_lookup\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, chemblIDLookupPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from chemblIDLookup slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for chembl_id_lookup") + } + + if len(chemblIDLookupAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ChemblIDLookup) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindChemblIDLookup(ctx, exec, o.ChemblID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *ChemblIDLookupSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ChemblIDLookupSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), chemblIDLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"chembl_id_lookup\".* FROM \"chembl_id_lookup\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, chemblIDLookupPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ChemblIDLookupSlice") + } + + *o = slice + + return nil +} + +// ChemblIDLookupExists checks if the ChemblIDLookup row exists. +func ChemblIDLookupExists(ctx context.Context, exec boil.ContextExecutor, chemblID string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"chembl_id_lookup\" where \"chembl_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, chemblID) + } + row := exec.QueryRowContext(ctx, sql, chemblID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if chembl_id_lookup exists") + } + + return exists, nil +} diff --git a/models/component_class.go b/models/component_class.go new file mode 100644 index 0000000..1bc37be --- /dev/null +++ b/models/component_class.go @@ -0,0 +1,1241 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// ComponentClass is an object representing the database table. +type ComponentClass struct { + ComponentID int64 `boil:"component_id" json:"component_id" toml:"component_id" yaml:"component_id"` + ProteinClassID int64 `boil:"protein_class_id" json:"protein_class_id" toml:"protein_class_id" yaml:"protein_class_id"` + CompClassID int64 `boil:"comp_class_id" json:"comp_class_id" toml:"comp_class_id" yaml:"comp_class_id"` + + R *componentClassR `boil:"-" json:"-" toml:"-" yaml:"-"` + L componentClassL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ComponentClassColumns = struct { + ComponentID string + ProteinClassID string + CompClassID string +}{ + ComponentID: "component_id", + ProteinClassID: "protein_class_id", + CompClassID: "comp_class_id", +} + +var ComponentClassTableColumns = struct { + ComponentID string + ProteinClassID string + CompClassID string +}{ + ComponentID: "component_class.component_id", + ProteinClassID: "component_class.protein_class_id", + CompClassID: "component_class.comp_class_id", +} + +// Generated where + +var ComponentClassWhere = struct { + ComponentID whereHelperint64 + ProteinClassID whereHelperint64 + CompClassID whereHelperint64 +}{ + ComponentID: whereHelperint64{field: "\"component_class\".\"component_id\""}, + ProteinClassID: whereHelperint64{field: "\"component_class\".\"protein_class_id\""}, + CompClassID: whereHelperint64{field: "\"component_class\".\"comp_class_id\""}, +} + +// ComponentClassRels is where relationship names are stored. 
+var ComponentClassRels = struct { + ProteinClass string + Component string +}{ + ProteinClass: "ProteinClass", + Component: "Component", +} + +// componentClassR is where relationships are stored. +type componentClassR struct { + ProteinClass *ProteinClassification `boil:"ProteinClass" json:"ProteinClass" toml:"ProteinClass" yaml:"ProteinClass"` + Component *ComponentSequence `boil:"Component" json:"Component" toml:"Component" yaml:"Component"` +} + +// NewStruct creates a new relationship struct +func (*componentClassR) NewStruct() *componentClassR { + return &componentClassR{} +} + +func (r *componentClassR) GetProteinClass() *ProteinClassification { + if r == nil { + return nil + } + return r.ProteinClass +} + +func (r *componentClassR) GetComponent() *ComponentSequence { + if r == nil { + return nil + } + return r.Component +} + +// componentClassL is where Load methods for each relationship are stored. +type componentClassL struct{} + +var ( + componentClassAllColumns = []string{"component_id", "protein_class_id", "comp_class_id"} + componentClassColumnsWithoutDefault = []string{"component_id", "protein_class_id", "comp_class_id"} + componentClassColumnsWithDefault = []string{} + componentClassPrimaryKeyColumns = []string{"comp_class_id"} + componentClassGeneratedColumns = []string{} +) + +type ( + // ComponentClassSlice is an alias for a slice of pointers to ComponentClass. + // This should almost always be used instead of []ComponentClass. 
+ ComponentClassSlice []*ComponentClass + // ComponentClassHook is the signature for custom ComponentClass hook methods + ComponentClassHook func(context.Context, boil.ContextExecutor, *ComponentClass) error + + componentClassQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + componentClassType = reflect.TypeOf(&ComponentClass{}) + componentClassMapping = queries.MakeStructMapping(componentClassType) + componentClassPrimaryKeyMapping, _ = queries.BindMapping(componentClassType, componentClassMapping, componentClassPrimaryKeyColumns) + componentClassInsertCacheMut sync.RWMutex + componentClassInsertCache = make(map[string]insertCache) + componentClassUpdateCacheMut sync.RWMutex + componentClassUpdateCache = make(map[string]updateCache) + componentClassUpsertCacheMut sync.RWMutex + componentClassUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var componentClassAfterSelectHooks []ComponentClassHook + +var componentClassBeforeInsertHooks []ComponentClassHook +var componentClassAfterInsertHooks []ComponentClassHook + +var componentClassBeforeUpdateHooks []ComponentClassHook +var componentClassAfterUpdateHooks []ComponentClassHook + +var componentClassBeforeDeleteHooks []ComponentClassHook +var componentClassAfterDeleteHooks []ComponentClassHook + +var componentClassBeforeUpsertHooks []ComponentClassHook +var componentClassAfterUpsertHooks []ComponentClassHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *ComponentClass) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentClassAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *ComponentClass) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentClassBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *ComponentClass) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentClassAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *ComponentClass) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentClassBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ComponentClass) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentClassAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *ComponentClass) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentClassBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *ComponentClass) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentClassAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ComponentClass) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentClassBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *ComponentClass) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentClassAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddComponentClassHook registers your hook function for all future operations. 
+func AddComponentClassHook(hookPoint boil.HookPoint, componentClassHook ComponentClassHook) { + switch hookPoint { + case boil.AfterSelectHook: + componentClassAfterSelectHooks = append(componentClassAfterSelectHooks, componentClassHook) + case boil.BeforeInsertHook: + componentClassBeforeInsertHooks = append(componentClassBeforeInsertHooks, componentClassHook) + case boil.AfterInsertHook: + componentClassAfterInsertHooks = append(componentClassAfterInsertHooks, componentClassHook) + case boil.BeforeUpdateHook: + componentClassBeforeUpdateHooks = append(componentClassBeforeUpdateHooks, componentClassHook) + case boil.AfterUpdateHook: + componentClassAfterUpdateHooks = append(componentClassAfterUpdateHooks, componentClassHook) + case boil.BeforeDeleteHook: + componentClassBeforeDeleteHooks = append(componentClassBeforeDeleteHooks, componentClassHook) + case boil.AfterDeleteHook: + componentClassAfterDeleteHooks = append(componentClassAfterDeleteHooks, componentClassHook) + case boil.BeforeUpsertHook: + componentClassBeforeUpsertHooks = append(componentClassBeforeUpsertHooks, componentClassHook) + case boil.AfterUpsertHook: + componentClassAfterUpsertHooks = append(componentClassAfterUpsertHooks, componentClassHook) + } +} + +// One returns a single componentClass record from the query. +func (q componentClassQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ComponentClass, error) { + o := &ComponentClass{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for component_class") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ComponentClass records from the query. 
+func (q componentClassQuery) All(ctx context.Context, exec boil.ContextExecutor) (ComponentClassSlice, error) { + var o []*ComponentClass + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ComponentClass slice") + } + + if len(componentClassAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ComponentClass records in the query. +func (q componentClassQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count component_class rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q componentClassQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if component_class exists") + } + + return count > 0, nil +} + +// ProteinClass pointed to by the foreign key. +func (o *ComponentClass) ProteinClass(mods ...qm.QueryMod) proteinClassificationQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"protein_class_id\" = ?", o.ProteinClassID), + } + + queryMods = append(queryMods, mods...) + + return ProteinClassifications(queryMods...) +} + +// Component pointed to by the foreign key. +func (o *ComponentClass) Component(mods ...qm.QueryMod) componentSequenceQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"component_id\" = ?", o.ComponentID), + } + + queryMods = append(queryMods, mods...) 
+ + return ComponentSequences(queryMods...) +} + +// LoadProteinClass allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (componentClassL) LoadProteinClass(ctx context.Context, e boil.ContextExecutor, singular bool, maybeComponentClass interface{}, mods queries.Applicator) error { + var slice []*ComponentClass + var object *ComponentClass + + if singular { + object = maybeComponentClass.(*ComponentClass) + } else { + slice = *maybeComponentClass.(*[]*ComponentClass) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &componentClassR{} + } + args = append(args, object.ProteinClassID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &componentClassR{} + } + + for _, a := range args { + if a == obj.ProteinClassID { + continue Outer + } + } + + args = append(args, obj.ProteinClassID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`protein_classification`), + qm.WhereIn(`protein_classification.protein_class_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ProteinClassification") + } + + var resultSlice []*ProteinClassification + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice ProteinClassification") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for protein_classification") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for protein_classification") + } + + if len(componentClassAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 
{ + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.ProteinClass = foreign + if foreign.R == nil { + foreign.R = &proteinClassificationR{} + } + foreign.R.ProteinClassComponentClasses = append(foreign.R.ProteinClassComponentClasses, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ProteinClassID == foreign.ProteinClassID { + local.R.ProteinClass = foreign + if foreign.R == nil { + foreign.R = &proteinClassificationR{} + } + foreign.R.ProteinClassComponentClasses = append(foreign.R.ProteinClassComponentClasses, local) + break + } + } + } + + return nil +} + +// LoadComponent allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (componentClassL) LoadComponent(ctx context.Context, e boil.ContextExecutor, singular bool, maybeComponentClass interface{}, mods queries.Applicator) error { + var slice []*ComponentClass + var object *ComponentClass + + if singular { + object = maybeComponentClass.(*ComponentClass) + } else { + slice = *maybeComponentClass.(*[]*ComponentClass) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &componentClassR{} + } + args = append(args, object.ComponentID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &componentClassR{} + } + + for _, a := range args { + if a == obj.ComponentID { + continue Outer + } + } + + args = append(args, obj.ComponentID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`component_sequences`), + qm.WhereIn(`component_sequences.component_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ComponentSequence") + } + + var resultSlice []*ComponentSequence + if err = queries.Bind(results, &resultSlice); err != nil { + return 
errors.Wrap(err, "failed to bind eager loaded slice ComponentSequence") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for component_sequences") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for component_sequences") + } + + if len(componentClassAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Component = foreign + if foreign.R == nil { + foreign.R = &componentSequenceR{} + } + foreign.R.ComponentComponentClasses = append(foreign.R.ComponentComponentClasses, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ComponentID == foreign.ComponentID { + local.R.Component = foreign + if foreign.R == nil { + foreign.R = &componentSequenceR{} + } + foreign.R.ComponentComponentClasses = append(foreign.R.ComponentComponentClasses, local) + break + } + } + } + + return nil +} + +// SetProteinClass of the componentClass to the related item. +// Sets o.R.ProteinClass to related. +// Adds o to related.R.ProteinClassComponentClasses. 
+func (o *ComponentClass) SetProteinClass(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ProteinClassification) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"component_class\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"protein_class_id"}), + strmangle.WhereClause("\"", "\"", 0, componentClassPrimaryKeyColumns), + ) + values := []interface{}{related.ProteinClassID, o.CompClassID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.ProteinClassID = related.ProteinClassID + if o.R == nil { + o.R = &componentClassR{ + ProteinClass: related, + } + } else { + o.R.ProteinClass = related + } + + if related.R == nil { + related.R = &proteinClassificationR{ + ProteinClassComponentClasses: ComponentClassSlice{o}, + } + } else { + related.R.ProteinClassComponentClasses = append(related.R.ProteinClassComponentClasses, o) + } + + return nil +} + +// SetComponent of the componentClass to the related item. +// Sets o.R.Component to related. +// Adds o to related.R.ComponentComponentClasses. 
+func (o *ComponentClass) SetComponent(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ComponentSequence) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"component_class\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"component_id"}), + strmangle.WhereClause("\"", "\"", 0, componentClassPrimaryKeyColumns), + ) + values := []interface{}{related.ComponentID, o.CompClassID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.ComponentID = related.ComponentID + if o.R == nil { + o.R = &componentClassR{ + Component: related, + } + } else { + o.R.Component = related + } + + if related.R == nil { + related.R = &componentSequenceR{ + ComponentComponentClasses: ComponentClassSlice{o}, + } + } else { + related.R.ComponentComponentClasses = append(related.R.ComponentComponentClasses, o) + } + + return nil +} + +// ComponentClasses retrieves all the records using an executor. +func ComponentClasses(mods ...qm.QueryMod) componentClassQuery { + mods = append(mods, qm.From("\"component_class\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"component_class\".*"}) + } + + return componentClassQuery{q} +} + +// FindComponentClass retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindComponentClass(ctx context.Context, exec boil.ContextExecutor, compClassID int64, selectCols ...string) (*ComponentClass, error) { + componentClassObj := &ComponentClass{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"component_class\" where \"comp_class_id\"=?", sel, + ) + + q := queries.Raw(query, compClassID) + + err := q.Bind(ctx, exec, componentClassObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from component_class") + } + + if err = componentClassObj.doAfterSelectHooks(ctx, exec); err != nil { + return componentClassObj, err + } + + return componentClassObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *ComponentClass) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no component_class provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(componentClassColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + componentClassInsertCacheMut.RLock() + cache, cached := componentClassInsertCache[key] + componentClassInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + componentClassAllColumns, + componentClassColumnsWithDefault, + componentClassColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(componentClassType, componentClassMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(componentClassType, componentClassMapping, returnColumns) + if err != nil { + return err + } + if len(wl) 
!= 0 { + cache.query = fmt.Sprintf("INSERT INTO \"component_class\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"component_class\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into component_class") + } + + if !cached { + componentClassInsertCacheMut.Lock() + componentClassInsertCache[key] = cache + componentClassInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ComponentClass. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *ComponentClass) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + componentClassUpdateCacheMut.RLock() + cache, cached := componentClassUpdateCache[key] + componentClassUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + componentClassAllColumns, + componentClassPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update component_class, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"component_class\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, componentClassPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(componentClassType, componentClassMapping, append(wl, componentClassPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update component_class row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for component_class") + } + + if !cached { + componentClassUpdateCacheMut.Lock() + componentClassUpdateCache[key] = cache + componentClassUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q componentClassQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for component_class") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for component_class") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ComponentClassSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentClassPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"component_class\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentClassPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in componentClass slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all componentClass") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *ComponentClass) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no component_class provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(componentClassColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + componentClassUpsertCacheMut.RLock() + cache, cached := componentClassUpsertCache[key] + componentClassUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + componentClassAllColumns, + componentClassColumnsWithDefault, + componentClassColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + componentClassAllColumns, + 
componentClassPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert component_class, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(componentClassPrimaryKeyColumns)) + copy(conflict, componentClassPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"component_class\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(componentClassType, componentClassMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(componentClassType, componentClassMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert component_class") + } + + if !cached { + componentClassUpsertCacheMut.Lock() + componentClassUpsertCache[key] = cache + componentClassUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ComponentClass record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *ComponentClass) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ComponentClass provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), componentClassPrimaryKeyMapping) + sql := "DELETE FROM \"component_class\" WHERE \"comp_class_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from component_class") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for component_class") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q componentClassQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no componentClassQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from component_class") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for component_class") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o ComponentClassSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(componentClassBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentClassPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"component_class\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentClassPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from componentClass slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for component_class") + } + + if len(componentClassAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ComponentClass) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindComponentClass(ctx, exec, o.CompClassID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
func (o *ComponentClassSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := ComponentClassSlice{}
	// Gather all primary keys so the refetch is a single SELECT.
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentClassPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"component_class\".* FROM \"component_class\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentClassPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in ComponentClassSlice")
	}

	// Replace the caller's slice wholesale; row order follows the database,
	// not the original slice, and rows deleted meanwhile are dropped.
	*o = slice

	return nil
}

// ComponentClassExists checks if the ComponentClass row exists.
func ComponentClassExists(ctx context.Context, exec boil.ContextExecutor, compClassID int64) (bool, error) {
	var exists bool
	// `limit 1` lets the engine stop at the first match.
	sql := "select exists(select 1 from \"component_class\" where \"comp_class_id\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, compClassID)
	}
	row := exec.QueryRowContext(ctx, sql, compClassID)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if component_class exists")
	}

	return exists, nil
}
package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// ComponentDomain is an object representing the database table.
// Nullable columns use null.* wrapper types; R/L carry eager-load state.
type ComponentDomain struct {
	CompdID       int64      `boil:"compd_id" json:"compd_id" toml:"compd_id" yaml:"compd_id"`
	DomainID      null.Int64 `boil:"domain_id" json:"domain_id,omitempty" toml:"domain_id" yaml:"domain_id,omitempty"`
	ComponentID   int64      `boil:"component_id" json:"component_id" toml:"component_id" yaml:"component_id"`
	StartPosition null.Int64 `boil:"start_position" json:"start_position,omitempty" toml:"start_position" yaml:"start_position,omitempty"`
	EndPosition   null.Int64 `boil:"end_position" json:"end_position,omitempty" toml:"end_position" yaml:"end_position,omitempty"`

	R *componentDomainR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L componentDomainL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// ComponentDomainColumns holds the bare column names of the table.
var ComponentDomainColumns = struct {
	CompdID       string
	DomainID      string
	ComponentID   string
	StartPosition string
	EndPosition   string
}{
	CompdID:       "compd_id",
	DomainID:      "domain_id",
	ComponentID:   "component_id",
	StartPosition: "start_position",
	EndPosition:   "end_position",
}

// ComponentDomainTableColumns holds the table-qualified column names,
// useful for joins where bare names would be ambiguous.
var ComponentDomainTableColumns = struct {
	CompdID       string
	DomainID      string
	ComponentID   string
	StartPosition string
	EndPosition   string
}{
	CompdID:       "component_domains.compd_id",
	DomainID:      "component_domains.domain_id",
	ComponentID:   "component_domains.component_id",
	StartPosition: "component_domains.start_position",
	EndPosition:   "component_domains.end_position",
}

// Generated where

// ComponentDomainWhere exposes typed where-clause helpers per column.
var ComponentDomainWhere = struct {
	CompdID       whereHelperint64
	DomainID      whereHelpernull_Int64
	ComponentID   whereHelperint64
	StartPosition whereHelpernull_Int64
	EndPosition   whereHelpernull_Int64
}{
	CompdID:       whereHelperint64{field: "\"component_domains\".\"compd_id\""},
	DomainID:      whereHelpernull_Int64{field: "\"component_domains\".\"domain_id\""},
	ComponentID:   whereHelperint64{field: "\"component_domains\".\"component_id\""},
	StartPosition: whereHelpernull_Int64{field: "\"component_domains\".\"start_position\""},
	EndPosition:   whereHelpernull_Int64{field: "\"component_domains\".\"end_position\""},
}

// ComponentDomainRels is where relationship names are stored.
var ComponentDomainRels = struct {
	Domain    string
	Component string
}{
	Domain:    "Domain",
	Component: "Component",
}

// componentDomainR is where relationships are stored.
type componentDomainR struct {
	Domain    *Domain            `boil:"Domain" json:"Domain" toml:"Domain" yaml:"Domain"`
	Component *ComponentSequence `boil:"Component" json:"Component" toml:"Component" yaml:"Component"`
}

// NewStruct creates a new relationship struct
func (*componentDomainR) NewStruct() *componentDomainR {
	return &componentDomainR{}
}

// GetDomain returns the eagerly loaded Domain, or nil on a nil receiver.
func (r *componentDomainR) GetDomain() *Domain {
	if r == nil {
		return nil
	}
	return r.Domain
}

// GetComponent returns the eagerly loaded Component, or nil on a nil receiver.
func (r *componentDomainR) GetComponent() *ComponentSequence {
	if r == nil {
		return nil
	}
	return r.Component
}

// componentDomainL is where Load methods for each relationship are stored.
type componentDomainL struct{}

var (
	componentDomainAllColumns            = []string{"compd_id", "domain_id", "component_id", "start_position", "end_position"}
	componentDomainColumnsWithoutDefault = []string{"compd_id", "component_id"}
	componentDomainColumnsWithDefault    = []string{"domain_id", "start_position", "end_position"}
	componentDomainPrimaryKeyColumns     = []string{"compd_id"}
	componentDomainGeneratedColumns      = []string{}
)

type (
	// ComponentDomainSlice is an alias for a slice of pointers to ComponentDomain.
	// This should almost always be used instead of []ComponentDomain.
	ComponentDomainSlice []*ComponentDomain
	// ComponentDomainHook is the signature for custom ComponentDomain hook methods
	ComponentDomainHook func(context.Context, boil.ContextExecutor, *ComponentDomain) error

	componentDomainQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert.
// Keys are derived from the column sets so each distinct column combination
// pays the reflection/SQL-building cost only once; mutexes guard concurrent use.
var (
	componentDomainType                 = reflect.TypeOf(&ComponentDomain{})
	componentDomainMapping              = queries.MakeStructMapping(componentDomainType)
	componentDomainPrimaryKeyMapping, _ = queries.BindMapping(componentDomainType, componentDomainMapping, componentDomainPrimaryKeyColumns)
	componentDomainInsertCacheMut       sync.RWMutex
	componentDomainInsertCache          = make(map[string]insertCache)
	componentDomainUpdateCacheMut       sync.RWMutex
	componentDomainUpdateCache          = make(map[string]updateCache)
	componentDomainUpsertCacheMut       sync.RWMutex
	componentDomainUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Package-level hook registries, appended to via AddComponentDomainHook.
var componentDomainAfterSelectHooks []ComponentDomainHook

var componentDomainBeforeInsertHooks []ComponentDomainHook
var componentDomainAfterInsertHooks []ComponentDomainHook

var componentDomainBeforeUpdateHooks []ComponentDomainHook
var componentDomainAfterUpdateHooks []ComponentDomainHook

var componentDomainBeforeDeleteHooks []ComponentDomainHook
var componentDomainAfterDeleteHooks []ComponentDomainHook

var componentDomainBeforeUpsertHooks []ComponentDomainHook
var componentDomainAfterUpsertHooks []ComponentDomainHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *ComponentDomain) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be globally disabled per-context via boil.SkipHooks.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentDomainAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *ComponentDomain) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentDomainBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *ComponentDomain) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentDomainAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *ComponentDomain) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentDomainBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *ComponentDomain) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentDomainAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *ComponentDomain) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be globally disabled per-context via boil.SkipHooks.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentDomainBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *ComponentDomain) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentDomainAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *ComponentDomain) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentDomainBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *ComponentDomain) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentDomainAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddComponentDomainHook registers your hook function for all future operations.
// NOTE(review): registration appends to package-level slices without locking;
// hooks are expected to be registered during init, before concurrent use.
func AddComponentDomainHook(hookPoint boil.HookPoint, componentDomainHook ComponentDomainHook) {
	switch hookPoint {
	case boil.AfterSelectHook:
		componentDomainAfterSelectHooks = append(componentDomainAfterSelectHooks, componentDomainHook)
	case boil.BeforeInsertHook:
		componentDomainBeforeInsertHooks = append(componentDomainBeforeInsertHooks, componentDomainHook)
	case boil.AfterInsertHook:
		componentDomainAfterInsertHooks = append(componentDomainAfterInsertHooks, componentDomainHook)
	case boil.BeforeUpdateHook:
		componentDomainBeforeUpdateHooks = append(componentDomainBeforeUpdateHooks, componentDomainHook)
	case boil.AfterUpdateHook:
		componentDomainAfterUpdateHooks = append(componentDomainAfterUpdateHooks, componentDomainHook)
	case boil.BeforeDeleteHook:
		componentDomainBeforeDeleteHooks = append(componentDomainBeforeDeleteHooks, componentDomainHook)
	case boil.AfterDeleteHook:
		componentDomainAfterDeleteHooks = append(componentDomainAfterDeleteHooks, componentDomainHook)
	case boil.BeforeUpsertHook:
		componentDomainBeforeUpsertHooks = append(componentDomainBeforeUpsertHooks, componentDomainHook)
	case boil.AfterUpsertHook:
		componentDomainAfterUpsertHooks = append(componentDomainAfterUpsertHooks, componentDomainHook)
	}
}

// One returns a single componentDomain record from the query.
// Returns sql.ErrNoRows when the query matches nothing.
func (q componentDomainQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ComponentDomain, error) {
	o := &ComponentDomain{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			// Surface the plain sentinel so callers can errors.Is against it.
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for component_domains")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all ComponentDomain records from the query.
func (q componentDomainQuery) All(ctx context.Context, exec boil.ContextExecutor) (ComponentDomainSlice, error) {
	var o []*ComponentDomain

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to ComponentDomain slice")
	}

	if len(componentDomainAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all ComponentDomain records in the query.
func (q componentDomainQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Strip the select list and wrap the query in COUNT(*).
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count component_domains rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q componentDomainQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	// LIMIT 1 lets the engine stop at the first matching row.
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if component_domains exists")
	}

	return count > 0, nil
}

// Domain pointed to by the foreign key.
func (o *ComponentDomain) Domain(mods ...qm.QueryMod) domainQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"domain_id\" = ?", o.DomainID),
	}

	queryMods = append(queryMods, mods...)

	return Domains(queryMods...)
}

// Component pointed to by the foreign key.
func (o *ComponentDomain) Component(mods ...qm.QueryMod) componentSequenceQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"component_id\" = ?", o.ComponentID),
	}

	queryMods = append(queryMods, mods...)

	return ComponentSequences(queryMods...)
}

// LoadDomain allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (componentDomainL) LoadDomain(ctx context.Context, e boil.ContextExecutor, singular bool, maybeComponentDomain interface{}, mods queries.Applicator) error {
	var slice []*ComponentDomain
	var object *ComponentDomain

	// The loader is invoked with either a single object or a slice.
	if singular {
		object = maybeComponentDomain.(*ComponentDomain)
	} else {
		slice = *maybeComponentDomain.(*[]*ComponentDomain)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &componentDomainR{}
		}
		// domain_id is nullable; NULL keys are skipped entirely.
		if !queries.IsNil(object.DomainID) {
			args = append(args, object.DomainID)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &componentDomainR{}
			}

			// Deduplicate foreign keys before building the IN clause.
			for _, a := range args {
				if queries.Equal(a, obj.DomainID) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.DomainID) {
				args = append(args, obj.DomainID)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`domains`),
		qm.WhereIn(`domains.domain_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load Domain")
	}

	var resultSlice []*Domain
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice Domain")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for domains")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for domains")
	}

	if len(componentDomainAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Wire both sides of the relationship (local.R.Domain and
	// foreign.R.ComponentDomains) so either direction is navigable.
	if singular {
		foreign := resultSlice[0]
		object.R.Domain = foreign
		if foreign.R == nil {
			foreign.R = &domainR{}
		}
		foreign.R.ComponentDomains = append(foreign.R.ComponentDomains, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.DomainID, foreign.DomainID) {
				local.R.Domain = foreign
				if foreign.R == nil {
					foreign.R = &domainR{}
				}
				foreign.R.ComponentDomains = append(foreign.R.ComponentDomains, local)
				break
			}
		}
	}

	return nil
}

// LoadComponent allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (componentDomainL) LoadComponent(ctx context.Context, e boil.ContextExecutor, singular bool, maybeComponentDomain interface{}, mods queries.Applicator) error {
	var slice []*ComponentDomain
	var object *ComponentDomain

	if singular {
		object = maybeComponentDomain.(*ComponentDomain)
	} else {
		slice = *maybeComponentDomain.(*[]*ComponentDomain)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &componentDomainR{}
		}
		// component_id is NOT NULL, so no nil check is needed here.
		args = append(args, object.ComponentID)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &componentDomainR{}
			}

			for _, a := range args {
				if a == obj.ComponentID {
					continue Outer
				}
			}

			args = append(args, obj.ComponentID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`component_sequences`),
		qm.WhereIn(`component_sequences.component_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load ComponentSequence")
	}

	var resultSlice []*ComponentSequence
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice ComponentSequence")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for component_sequences")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for component_sequences")
	}

	if len(componentDomainAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.Component = foreign
		if foreign.R == nil {
			foreign.R = &componentSequenceR{}
		}
		foreign.R.ComponentComponentDomains = append(foreign.R.ComponentComponentDomains, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.ComponentID == foreign.ComponentID {
				local.R.Component = foreign
				if foreign.R == nil {
					foreign.R = &componentSequenceR{}
				}
				foreign.R.ComponentComponentDomains = append(foreign.R.ComponentComponentDomains, local)
				break
			}
		}
	}

	return nil
}

// SetDomain of the componentDomain to the related item.
// Sets o.R.Domain to related.
// Adds o to related.R.ComponentDomains.
func (o *ComponentDomain) SetDomain(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Domain) error {
	var err error
	// Optionally insert the related row first so its primary key exists.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"component_domains\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"domain_id"}),
		strmangle.WhereClause("\"", "\"", 0, componentDomainPrimaryKeyColumns),
	)
	values := []interface{}{related.DomainID, o.CompdID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Keep the in-memory struct in sync with the database, then wire up
	// both sides of the relationship cache.
	queries.Assign(&o.DomainID, related.DomainID)
	if o.R == nil {
		o.R = &componentDomainR{
			Domain: related,
		}
	} else {
		o.R.Domain = related
	}

	if related.R == nil {
		related.R = &domainR{
			ComponentDomains: ComponentDomainSlice{o},
		}
	} else {
		related.R.ComponentDomains = append(related.R.ComponentDomains, o)
	}

	return nil
}

// RemoveDomain relationship.
// Sets o.R.Domain to nil.
// Removes o from all passed in related items' relationships struct.
func (o *ComponentDomain) RemoveDomain(ctx context.Context, exec boil.ContextExecutor, related *Domain) error {
	var err error

	// NULL out the nullable foreign key, then persist just that column.
	queries.SetScanner(&o.DomainID, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("domain_id")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.Domain = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Remove o from the related side's cache using swap-with-last removal.
	for i, ri := range related.R.ComponentDomains {
		if queries.Equal(o.DomainID, ri.DomainID) {
			continue
		}

		ln := len(related.R.ComponentDomains)
		if ln > 1 && i < ln-1 {
			related.R.ComponentDomains[i] = related.R.ComponentDomains[ln-1]
		}
		related.R.ComponentDomains = related.R.ComponentDomains[:ln-1]
		break
	}
	return nil
}

// SetComponent of the componentDomain to the related item.
// Sets o.R.Component to related.
// Adds o to related.R.ComponentComponentDomains.
func (o *ComponentDomain) SetComponent(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ComponentSequence) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"component_domains\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"component_id"}),
		strmangle.WhereClause("\"", "\"", 0, componentDomainPrimaryKeyColumns),
	)
	values := []interface{}{related.ComponentID, o.CompdID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	o.ComponentID = related.ComponentID
	if o.R == nil {
		o.R = &componentDomainR{
			Component: related,
		}
	} else {
		o.R.Component = related
	}

	if related.R == nil {
		related.R = &componentSequenceR{
			ComponentComponentDomains: ComponentDomainSlice{o},
		}
	} else {
		related.R.ComponentComponentDomains = append(related.R.ComponentComponentDomains, o)
	}

	return nil
}

// ComponentDomains retrieves all the records using an executor.
func ComponentDomains(mods ...qm.QueryMod) componentDomainQuery {
	mods = append(mods, qm.From("\"component_domains\""))
	q := NewQuery(mods...)
	// Default the select list to the whole table unless the caller set one.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"component_domains\".*"})
	}

	return componentDomainQuery{q}
}

// FindComponentDomain retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindComponentDomain(ctx context.Context, exec boil.ContextExecutor, compdID int64, selectCols ...string) (*ComponentDomain, error) {
	componentDomainObj := &ComponentDomain{}

	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"component_domains\" where \"compd_id\"=?", sel,
	)

	q := queries.Raw(query, compdID)

	err := q.Bind(ctx, exec, componentDomainObj)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			// Surface the plain sentinel so callers can errors.Is against it.
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from component_domains")
	}

	if err = componentDomainObj.doAfterSelectHooks(ctx, exec); err != nil {
		return componentDomainObj, err
	}

	return componentDomainObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *ComponentDomain) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no component_domains provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	// Columns with defaults that hold non-zero values must be sent explicitly.
	nzDefaults := queries.NonZeroDefaultSet(componentDomainColumnsWithDefault, o)

	// The built SQL and struct mapping are cached per column-set key so the
	// reflection work happens once per distinct column combination.
	key := makeCacheKey(columns, nzDefaults)
	componentDomainInsertCacheMut.RLock()
	cache, cached := componentDomainInsertCache[key]
	componentDomainInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			componentDomainAllColumns,
			componentDomainColumnsWithDefault,
			componentDomainColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(componentDomainType, componentDomainMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(componentDomainType, componentDomainMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"component_domains\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"component_domains\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	if len(cache.retMapping) != 0 {
		// RETURNING clause present: scan database-generated values back
		// into the struct.
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into component_domains")
	}

	if !cached {
		componentDomainInsertCacheMut.Lock()
		componentDomainInsertCache[key] = cache
		componentDomainInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the ComponentDomain.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *ComponentDomain) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	componentDomainUpdateCacheMut.RLock()
	cache, cached := componentDomainUpdateCache[key]
	componentDomainUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			componentDomainAllColumns,
			componentDomainPrimaryKeyColumns,
		)

		// created_at is never updated unless explicitly whitelisted.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update component_domains, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"component_domains\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, componentDomainPrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(componentDomainType, componentDomainMapping, append(wl, componentDomainPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update component_domains row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for component_domains")
	}

	if !cached {
		componentDomainUpdateCacheMut.Lock()
		componentDomainUpdateCache[key] = cache
		componentDomainUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q componentDomainQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for component_domains")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for component_domains")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o ComponentDomainSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	// SET values come first in the placeholder list, primary keys after.
	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentDomainPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"component_domains\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentDomainPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in componentDomain slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all componentDomain")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *ComponentDomain) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no component_domains provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(componentDomainColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	// (the key must encode every input that changes the generated SQL).
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	componentDomainUpsertCacheMut.RLock()
	cache, cached := componentDomainUpsertCache[key]
	componentDomainUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			componentDomainAllColumns,
			componentDomainColumnsWithDefault,
			componentDomainColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			componentDomainAllColumns,
			componentDomainPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert component_domains, could not build update column list")
		}

		// Default the conflict target to the primary key.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(componentDomainPrimaryKeyColumns))
			copy(conflict, componentDomainPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"component_domains\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(componentDomainType, componentDomainMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(componentDomainType, componentDomainMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert component_domains")
	}

	if !cached {
		componentDomainUpsertCacheMut.Lock()
		componentDomainUpsertCache[key] = cache
		componentDomainUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single ComponentDomain record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *ComponentDomain) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no ComponentDomain provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), componentDomainPrimaryKeyMapping)
	sql := "DELETE FROM \"component_domains\" WHERE \"compd_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from component_domains")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for component_domains")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
+func (q componentDomainQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no componentDomainQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from component_domains") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for component_domains") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o ComponentDomainSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(componentDomainBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentDomainPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"component_domains\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentDomainPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from componentDomain slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for component_domains") + } + + if len(componentDomainAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ComponentDomain) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindComponentDomain(ctx, exec, o.CompdID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *ComponentDomainSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ComponentDomainSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentDomainPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"component_domains\".* FROM \"component_domains\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentDomainPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ComponentDomainSlice") + } + + *o = slice + + return nil +} + +// ComponentDomainExists checks if the ComponentDomain row exists. +func ComponentDomainExists(ctx context.Context, exec boil.ContextExecutor, compdID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"component_domains\" where \"compd_id\"=? 
limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, compdID) + } + row := exec.QueryRowContext(ctx, sql, compdID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if component_domains exists") + } + + return exists, nil +} diff --git a/models/component_go.go b/models/component_go.go new file mode 100644 index 0000000..672dc2a --- /dev/null +++ b/models/component_go.go @@ -0,0 +1,1241 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// ComponentGo is an object representing the database table. 
// Each row links one component_sequences record to one go_classification
// record via the component_id and go_id foreign keys (see ComponentGoRels).
type ComponentGo struct {
	// Surrogate primary key (see componentGoPrimaryKeyColumns).
	CompGoID int64 `boil:"comp_go_id" json:"comp_go_id" toml:"comp_go_id" yaml:"comp_go_id"`
	// Foreign key into component_sequences (see the Component relationship).
	ComponentID int64 `boil:"component_id" json:"component_id" toml:"component_id" yaml:"component_id"`
	// Foreign key into go_classification (see the Go relationship).
	GoID string `boil:"go_id" json:"go_id" toml:"go_id" yaml:"go_id"`

	// R caches eagerly-loaded relationships; L carries the Load* methods.
	// Both are excluded from all serialization formats.
	R *componentGoR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L componentGoL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// ComponentGoColumns maps struct field names to their bare column names.
var ComponentGoColumns = struct {
	CompGoID    string
	ComponentID string
	GoID        string
}{
	CompGoID:    "comp_go_id",
	ComponentID: "component_id",
	GoID:        "go_id",
}

// ComponentGoTableColumns maps struct field names to table-qualified column names.
var ComponentGoTableColumns = struct {
	CompGoID    string
	ComponentID string
	GoID        string
}{
	CompGoID:    "component_go.comp_go_id",
	ComponentID: "component_go.component_id",
	GoID:        "component_go.go_id",
}

// Generated where

// ComponentGoWhere exposes typed where-clause helpers, one per column.
var ComponentGoWhere = struct {
	CompGoID    whereHelperint64
	ComponentID whereHelperint64
	GoID        whereHelperstring
}{
	CompGoID:    whereHelperint64{field: "\"component_go\".\"comp_go_id\""},
	ComponentID: whereHelperint64{field: "\"component_go\".\"component_id\""},
	GoID:        whereHelperstring{field: "\"component_go\".\"go_id\""},
}

// ComponentGoRels is where relationship names are stored.
var ComponentGoRels = struct {
	Go        string
	Component string
}{
	Go:        "Go",
	Component: "Component",
}

// componentGoR is where relationships are stored.
type componentGoR struct {
	Go        *GoClassification  `boil:"Go" json:"Go" toml:"Go" yaml:"Go"`
	Component *ComponentSequence `boil:"Component" json:"Component" toml:"Component" yaml:"Component"`
}

// NewStruct creates a new relationship struct
func (*componentGoR) NewStruct() *componentGoR {
	return &componentGoR{}
}

// GetGo returns the eagerly-loaded GoClassification; safe on a nil receiver
// (returns nil when nothing has been loaded).
func (r *componentGoR) GetGo() *GoClassification {
	if r == nil {
		return nil
	}
	return r.Go
}

// GetComponent returns the eagerly-loaded ComponentSequence; safe on a nil
// receiver (returns nil when nothing has been loaded).
func (r *componentGoR) GetComponent() *ComponentSequence {
	if r == nil {
		return nil
	}
	return r.Component
}

// componentGoL is where Load methods for each relationship are stored.
type componentGoL struct{}

// Column lists used by the generated CRUD methods to infer insert/update
// column sets. Note: this table has no database-side column defaults.
var (
	componentGoAllColumns            = []string{"comp_go_id", "component_id", "go_id"}
	componentGoColumnsWithoutDefault = []string{"comp_go_id", "component_id", "go_id"}
	componentGoColumnsWithDefault    = []string{}
	componentGoPrimaryKeyColumns     = []string{"comp_go_id"}
	componentGoGeneratedColumns      = []string{}
)

type (
	// ComponentGoSlice is an alias for a slice of pointers to ComponentGo.
	// This should almost always be used instead of []ComponentGo.
	ComponentGoSlice []*ComponentGo
	// ComponentGoHook is the signature for custom ComponentGo hook methods
	ComponentGoHook func(context.Context, boil.ContextExecutor, *ComponentGo) error

	componentGoQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
// Built SQL and struct-field mappings are memoized per column-set key so the
// reflection work happens once per distinct call shape.
var (
	componentGoType                 = reflect.TypeOf(&ComponentGo{})
	componentGoMapping              = queries.MakeStructMapping(componentGoType)
	componentGoPrimaryKeyMapping, _ = queries.BindMapping(componentGoType, componentGoMapping, componentGoPrimaryKeyColumns)
	componentGoInsertCacheMut       sync.RWMutex
	componentGoInsertCache          = make(map[string]insertCache)
	componentGoUpdateCacheMut       sync.RWMutex
	componentGoUpdateCache          = make(map[string]updateCache)
	componentGoUpsertCacheMut       sync.RWMutex
	componentGoUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Registered hook functions, one slice per hook point; populated via
// AddComponentGoHook and executed by the do*Hooks methods below.
var componentGoAfterSelectHooks []ComponentGoHook

var componentGoBeforeInsertHooks []ComponentGoHook
var componentGoAfterInsertHooks []ComponentGoHook

var componentGoBeforeUpdateHooks []ComponentGoHook
var componentGoAfterUpdateHooks []ComponentGoHook

var componentGoBeforeDeleteHooks []ComponentGoHook
var componentGoAfterDeleteHooks []ComponentGoHook

var componentGoBeforeUpsertHooks []ComponentGoHook
var componentGoAfterUpsertHooks []ComponentGoHook

// doAfterSelectHooks executes all "after Select" hooks.
// Stops at, and returns, the first hook error.
func (o *ComponentGo) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hook execution can be disabled wholesale through the context.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentGoAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *ComponentGo) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentGoBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *ComponentGo) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentGoAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
+func (o *ComponentGo) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentGoBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ComponentGo) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentGoAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *ComponentGo) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentGoBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *ComponentGo) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentGoAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ComponentGo) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentGoBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *ComponentGo) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentGoAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddComponentGoHook registers your hook function for all future operations. +func AddComponentGoHook(hookPoint boil.HookPoint, componentGoHook ComponentGoHook) { + switch hookPoint { + case boil.AfterSelectHook: + componentGoAfterSelectHooks = append(componentGoAfterSelectHooks, componentGoHook) + case boil.BeforeInsertHook: + componentGoBeforeInsertHooks = append(componentGoBeforeInsertHooks, componentGoHook) + case boil.AfterInsertHook: + componentGoAfterInsertHooks = append(componentGoAfterInsertHooks, componentGoHook) + case boil.BeforeUpdateHook: + componentGoBeforeUpdateHooks = append(componentGoBeforeUpdateHooks, componentGoHook) + case boil.AfterUpdateHook: + componentGoAfterUpdateHooks = append(componentGoAfterUpdateHooks, componentGoHook) + case boil.BeforeDeleteHook: + componentGoBeforeDeleteHooks = append(componentGoBeforeDeleteHooks, componentGoHook) + case boil.AfterDeleteHook: + componentGoAfterDeleteHooks = append(componentGoAfterDeleteHooks, componentGoHook) + case boil.BeforeUpsertHook: + componentGoBeforeUpsertHooks = append(componentGoBeforeUpsertHooks, componentGoHook) + case boil.AfterUpsertHook: + componentGoAfterUpsertHooks = append(componentGoAfterUpsertHooks, componentGoHook) + } +} + +// One returns a single componentGo record from the query. 
+func (q componentGoQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ComponentGo, error) { + o := &ComponentGo{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for component_go") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ComponentGo records from the query. +func (q componentGoQuery) All(ctx context.Context, exec boil.ContextExecutor) (ComponentGoSlice, error) { + var o []*ComponentGo + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ComponentGo slice") + } + + if len(componentGoAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ComponentGo records in the query. +func (q componentGoQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count component_go rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q componentGoQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if component_go exists") + } + + return count > 0, nil +} + +// Go pointed to by the foreign key. 
func (o *ComponentGo) Go(mods ...qm.QueryMod) goClassificationQuery {
	// Filter the related table by this row's go_id; extra mods are appended
	// after the FK constraint.
	queryMods := []qm.QueryMod{
		qm.Where("\"go_id\" = ?", o.GoID),
	}

	queryMods = append(queryMods, mods...)

	return GoClassifications(queryMods...)
}

// Component pointed to by the foreign key.
func (o *ComponentGo) Component(mods ...qm.QueryMod) componentSequenceQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"component_id\" = ?", o.ComponentID),
	}

	queryMods = append(queryMods, mods...)

	return ComponentSequences(queryMods...)
}

// LoadGo allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (componentGoL) LoadGo(ctx context.Context, e boil.ContextExecutor, singular bool, maybeComponentGo interface{}, mods queries.Applicator) error {
	var slice []*ComponentGo
	var object *ComponentGo

	// maybeComponentGo is either a single object or a slice, per `singular`.
	if singular {
		object = maybeComponentGo.(*ComponentGo)
	} else {
		slice = *maybeComponentGo.(*[]*ComponentGo)
	}

	// Collect the distinct go_id values to fetch in one query.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &componentGoR{}
		}
		args = append(args, object.GoID)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &componentGoR{}
			}

			// Linear dedupe: skip IDs already queued.
			for _, a := range args {
				if a == obj.GoID {
					continue Outer
				}
			}

			args = append(args, obj.GoID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`go_classification`),
		qm.WhereIn(`go_classification.go_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load GoClassification")
	}

	var resultSlice []*GoClassification
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice GoClassification")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for go_classification")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for go_classification")
	}

	if len(componentGoAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Wire both directions of the relationship: local.R.Go and the foreign
	// side's back-reference slice.
	if singular {
		foreign := resultSlice[0]
		object.R.Go = foreign
		if foreign.R == nil {
			foreign.R = &goClassificationR{}
		}
		foreign.R.GoComponentGos = append(foreign.R.GoComponentGos, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.GoID == foreign.GoID {
				local.R.Go = foreign
				if foreign.R == nil {
					foreign.R = &goClassificationR{}
				}
				foreign.R.GoComponentGos = append(foreign.R.GoComponentGos, local)
				break
			}
		}
	}

	return nil
}

// LoadComponent allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (componentGoL) LoadComponent(ctx context.Context, e boil.ContextExecutor, singular bool, maybeComponentGo interface{}, mods queries.Applicator) error {
	var slice []*ComponentGo
	var object *ComponentGo

	// maybeComponentGo is either a single object or a slice, per `singular`.
	if singular {
		object = maybeComponentGo.(*ComponentGo)
	} else {
		slice = *maybeComponentGo.(*[]*ComponentGo)
	}

	// Collect the distinct component_id values to fetch in one query.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &componentGoR{}
		}
		args = append(args, object.ComponentID)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &componentGoR{}
			}

			// Linear dedupe: skip IDs already queued.
			for _, a := range args {
				if a == obj.ComponentID {
					continue Outer
				}
			}

			args = append(args, obj.ComponentID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`component_sequences`),
		qm.WhereIn(`component_sequences.component_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load ComponentSequence")
	}

	var resultSlice []*ComponentSequence
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice ComponentSequence")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for component_sequences")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for component_sequences")
	}

	if len(componentGoAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Wire both directions: local.R.Component and the foreign side's
	// back-reference slice.
	if singular {
		foreign := resultSlice[0]
		object.R.Component = foreign
		if foreign.R == nil {
			foreign.R = &componentSequenceR{}
		}
		foreign.R.ComponentComponentGos = append(foreign.R.ComponentComponentGos, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.ComponentID == foreign.ComponentID {
				local.R.Component = foreign
				if foreign.R == nil {
					foreign.R = &componentSequenceR{}
				}
				foreign.R.ComponentComponentGos = append(foreign.R.ComponentComponentGos, local)
				break
			}
		}
	}

	return nil
}

// SetGo of the componentGo to the related item.
// Sets o.R.Go to related.
// Adds o to related.R.GoComponentGos.
func (o *ComponentGo) SetGo(ctx context.Context, exec boil.ContextExecutor, insert bool, related *GoClassification) error {
	var err error
	// Optionally insert the related row first so its key exists.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Persist the new FK on this row, keyed by primary key.
	updateQuery := fmt.Sprintf(
		"UPDATE \"component_go\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"go_id"}),
		strmangle.WhereClause("\"", "\"", 0, componentGoPrimaryKeyColumns),
	)
	values := []interface{}{related.GoID, o.CompGoID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the change in memory on both sides of the relationship.
	o.GoID = related.GoID
	if o.R == nil {
		o.R = &componentGoR{
			Go: related,
		}
	} else {
		o.R.Go = related
	}

	if related.R == nil {
		related.R = &goClassificationR{
			GoComponentGos: ComponentGoSlice{o},
		}
	} else {
		related.R.GoComponentGos = append(related.R.GoComponentGos, o)
	}

	return nil
}

// SetComponent of the componentGo to the related item.
// Sets o.R.Component to related.
// Adds o to related.R.ComponentComponentGos.
func (o *ComponentGo) SetComponent(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ComponentSequence) error {
	var err error
	// Optionally insert the related row first so its key exists.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Persist the new FK on this row, keyed by primary key.
	updateQuery := fmt.Sprintf(
		"UPDATE \"component_go\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"component_id"}),
		strmangle.WhereClause("\"", "\"", 0, componentGoPrimaryKeyColumns),
	)
	values := []interface{}{related.ComponentID, o.CompGoID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the change in memory on both sides of the relationship.
	o.ComponentID = related.ComponentID
	if o.R == nil {
		o.R = &componentGoR{
			Component: related,
		}
	} else {
		o.R.Component = related
	}

	if related.R == nil {
		related.R = &componentSequenceR{
			ComponentComponentGos: ComponentGoSlice{o},
		}
	} else {
		related.R.ComponentComponentGos = append(related.R.ComponentComponentGos, o)
	}

	return nil
}

// ComponentGos retrieves all the records using an executor.
func ComponentGos(mods ...qm.QueryMod) componentGoQuery {
	mods = append(mods, qm.From("\"component_go\""))
	q := NewQuery(mods...)
	// Default to selecting every column unless the caller chose a select list.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"component_go\".*"})
	}

	return componentGoQuery{q}
}

// FindComponentGo retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindComponentGo(ctx context.Context, exec boil.ContextExecutor, compGoID int64, selectCols ...string) (*ComponentGo, error) {
	componentGoObj := &ComponentGo{}

	// Quote any caller-supplied select columns; default to "*".
	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"component_go\" where \"comp_go_id\"=?", sel,
	)

	q := queries.Raw(query, compGoID)

	err := q.Bind(ctx, exec, componentGoObj)
	if err != nil {
		// Pass sql.ErrNoRows through unwrapped so callers can test for it.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from component_go")
	}

	if err = componentGoObj.doAfterSelectHooks(ctx, exec); err != nil {
		return componentGoObj, err
	}

	return componentGoObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *ComponentGo) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no component_go provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(componentGoColumnsWithDefault, o)

	// Look up (under read lock) a previously built statement for this exact
	// column-set key; build and store it below on a miss.
	key := makeCacheKey(columns, nzDefaults)
	componentGoInsertCacheMut.RLock()
	cache, cached := componentGoInsertCache[key]
	componentGoInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			componentGoAllColumns,
			componentGoColumnsWithDefault,
			componentGoColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(componentGoType, componentGoMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(componentGoType, componentGoMapping, returnColumns)
		if err != nil {
			return err
		}
		// %% survives the first Sprintf as a literal %; the second Sprintf
		// below fills in the output/RETURNING fragments.
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"component_go\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"component_go\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	// With a RETURNING clause, scan the generated values back into o.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into component_go")
	}

	// Publish the built statement; concurrent builders may race benignly
	// (identical entries, last write wins).
	if !cached {
		componentGoInsertCacheMut.Lock()
		componentGoInsertCache[key] = cache
		componentGoInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the ComponentGo.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *ComponentGo) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	// Look up (under read lock) a previously built statement for this exact
	// column-set key; build and store it below on a miss.
	key := makeCacheKey(columns, nil)
	componentGoUpdateCacheMut.RLock()
	cache, cached := componentGoUpdateCache[key]
	componentGoUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			componentGoAllColumns,
			componentGoPrimaryKeyColumns,
		)

		// Generated boilerplate: a no-op for this table, which has no
		// created_at column (see componentGoAllColumns).
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update component_go, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"component_go\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, componentGoPrimaryKeyColumns),
		)
		// Bind the SET values followed by the primary key for the WHERE clause.
		cache.valueMapping, err = queries.BindMapping(componentGoType, componentGoMapping, append(wl, componentGoPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update component_go row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for component_go")
	}

	// Publish the built statement; concurrent builders may race benignly
	// (identical entries, last write wins).
	if !cached {
		componentGoUpdateCacheMut.Lock()
		componentGoUpdateCache[key] = cache
		componentGoUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q componentGoQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for component_go")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for component_go")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o ComponentGoSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	// Column values come first in args; primary key values for the repeated
	// WHERE clause are appended after them below.
	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentGoPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"component_go\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentGoPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in componentGo slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all componentGo")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *ComponentGo) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no component_go provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(componentGoColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	// The key encodes every input that changes the generated SQL: conflict
	// mode, conflict columns, both column sets, and non-zero defaults.
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	componentGoUpsertCacheMut.RLock()
	cache, cached := componentGoUpsertCache[key]
	componentGoUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			componentGoAllColumns,
			componentGoColumnsWithDefault,
			componentGoColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			componentGoAllColumns,
			componentGoPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert component_go, could not build update column list")
		}

		// Default the conflict target to the primary key when the caller
		// gave no explicit conflict columns.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(componentGoPrimaryKeyColumns))
			copy(conflict, componentGoPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"component_go\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(componentGoType, componentGoMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(componentGoType, componentGoMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		// RETURNING columns are scanned back into the struct.
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert component_go")
	}

	if !cached {
		componentGoUpsertCacheMut.Lock()
		componentGoUpsertCache[key] = cache
		componentGoUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single ComponentGo record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *ComponentGo) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no ComponentGo provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	// Only the primary key is used to locate the row; other fields are ignored.
	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), componentGoPrimaryKeyMapping)
	sql := "DELETE FROM \"component_go\" WHERE \"comp_go_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from component_go")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for component_go")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q componentGoQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no componentGoQuery provided for delete all")
	}

	// Rewrites the built query into a DELETE before executing it.
	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from component_go")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for component_go")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o ComponentGoSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Run before-delete hooks for every object ahead of the single bulk DELETE.
	if len(componentGoBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// Collect primary key values in slice order to match the repeated
	// WHERE clause generated below.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentGoPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"component_go\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentGoPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from componentGo slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for component_go")
	}

	if len(componentGoAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *ComponentGo) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindComponentGo(ctx, exec, o.CompGoID)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place with the freshly fetched row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
+func (o *ComponentGoSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ComponentGoSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentGoPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"component_go\".* FROM \"component_go\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentGoPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ComponentGoSlice") + } + + *o = slice + + return nil +} + +// ComponentGoExists checks if the ComponentGo row exists. +func ComponentGoExists(ctx context.Context, exec boil.ContextExecutor, compGoID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"component_go\" where \"comp_go_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, compGoID) + } + row := exec.QueryRowContext(ctx, sql, compGoID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if component_go exists") + } + + return exists, nil +} diff --git a/models/component_sequences.go b/models/component_sequences.go new file mode 100644 index 0000000..c04af7c --- /dev/null +++ b/models/component_sequences.go @@ -0,0 +1,2071 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// ComponentSequence is an object representing the database table. +type ComponentSequence struct { + ComponentID int64 `boil:"component_id" json:"component_id" toml:"component_id" yaml:"component_id"` + ComponentType null.String `boil:"component_type" json:"component_type,omitempty" toml:"component_type" yaml:"component_type,omitempty"` + Accession null.String `boil:"accession" json:"accession,omitempty" toml:"accession" yaml:"accession,omitempty"` + Sequence null.String `boil:"sequence" json:"sequence,omitempty" toml:"sequence" yaml:"sequence,omitempty"` + SequenceMd5sum null.String `boil:"sequence_md5sum" json:"sequence_md5sum,omitempty" toml:"sequence_md5sum" yaml:"sequence_md5sum,omitempty"` + Description null.String `boil:"description" json:"description,omitempty" toml:"description" yaml:"description,omitempty"` + TaxID null.Int64 `boil:"tax_id" json:"tax_id,omitempty" toml:"tax_id" yaml:"tax_id,omitempty"` + Organism null.String `boil:"organism" json:"organism,omitempty" toml:"organism" yaml:"organism,omitempty"` + DBSource null.String `boil:"db_source" json:"db_source,omitempty" toml:"db_source" yaml:"db_source,omitempty"` + DBVersion null.String `boil:"db_version" json:"db_version,omitempty" toml:"db_version" yaml:"db_version,omitempty"` + + R *componentSequenceR `boil:"-" json:"-" toml:"-" yaml:"-"` + L componentSequenceL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ComponentSequenceColumns = struct { + ComponentID string + ComponentType string + Accession string + Sequence string + SequenceMd5sum string + Description 
string + TaxID string + Organism string + DBSource string + DBVersion string +}{ + ComponentID: "component_id", + ComponentType: "component_type", + Accession: "accession", + Sequence: "sequence", + SequenceMd5sum: "sequence_md5sum", + Description: "description", + TaxID: "tax_id", + Organism: "organism", + DBSource: "db_source", + DBVersion: "db_version", +} + +var ComponentSequenceTableColumns = struct { + ComponentID string + ComponentType string + Accession string + Sequence string + SequenceMd5sum string + Description string + TaxID string + Organism string + DBSource string + DBVersion string +}{ + ComponentID: "component_sequences.component_id", + ComponentType: "component_sequences.component_type", + Accession: "component_sequences.accession", + Sequence: "component_sequences.sequence", + SequenceMd5sum: "component_sequences.sequence_md5sum", + Description: "component_sequences.description", + TaxID: "component_sequences.tax_id", + Organism: "component_sequences.organism", + DBSource: "component_sequences.db_source", + DBVersion: "component_sequences.db_version", +} + +// Generated where + +var ComponentSequenceWhere = struct { + ComponentID whereHelperint64 + ComponentType whereHelpernull_String + Accession whereHelpernull_String + Sequence whereHelpernull_String + SequenceMd5sum whereHelpernull_String + Description whereHelpernull_String + TaxID whereHelpernull_Int64 + Organism whereHelpernull_String + DBSource whereHelpernull_String + DBVersion whereHelpernull_String +}{ + ComponentID: whereHelperint64{field: "\"component_sequences\".\"component_id\""}, + ComponentType: whereHelpernull_String{field: "\"component_sequences\".\"component_type\""}, + Accession: whereHelpernull_String{field: "\"component_sequences\".\"accession\""}, + Sequence: whereHelpernull_String{field: "\"component_sequences\".\"sequence\""}, + SequenceMd5sum: whereHelpernull_String{field: "\"component_sequences\".\"sequence_md5sum\""}, + Description: whereHelpernull_String{field: 
"\"component_sequences\".\"description\""}, + TaxID: whereHelpernull_Int64{field: "\"component_sequences\".\"tax_id\""}, + Organism: whereHelpernull_String{field: "\"component_sequences\".\"organism\""}, + DBSource: whereHelpernull_String{field: "\"component_sequences\".\"db_source\""}, + DBVersion: whereHelpernull_String{field: "\"component_sequences\".\"db_version\""}, +} + +// ComponentSequenceRels is where relationship names are stored. +var ComponentSequenceRels = struct { + ComponentComponentClasses string + ComponentComponentDomains string + ComponentComponentGos string + ComponentComponentSynonyms string + ComponentSiteComponents string + ComponentTargetComponents string +}{ + ComponentComponentClasses: "ComponentComponentClasses", + ComponentComponentDomains: "ComponentComponentDomains", + ComponentComponentGos: "ComponentComponentGos", + ComponentComponentSynonyms: "ComponentComponentSynonyms", + ComponentSiteComponents: "ComponentSiteComponents", + ComponentTargetComponents: "ComponentTargetComponents", +} + +// componentSequenceR is where relationships are stored. 
+type componentSequenceR struct { + ComponentComponentClasses ComponentClassSlice `boil:"ComponentComponentClasses" json:"ComponentComponentClasses" toml:"ComponentComponentClasses" yaml:"ComponentComponentClasses"` + ComponentComponentDomains ComponentDomainSlice `boil:"ComponentComponentDomains" json:"ComponentComponentDomains" toml:"ComponentComponentDomains" yaml:"ComponentComponentDomains"` + ComponentComponentGos ComponentGoSlice `boil:"ComponentComponentGos" json:"ComponentComponentGos" toml:"ComponentComponentGos" yaml:"ComponentComponentGos"` + ComponentComponentSynonyms ComponentSynonymSlice `boil:"ComponentComponentSynonyms" json:"ComponentComponentSynonyms" toml:"ComponentComponentSynonyms" yaml:"ComponentComponentSynonyms"` + ComponentSiteComponents SiteComponentSlice `boil:"ComponentSiteComponents" json:"ComponentSiteComponents" toml:"ComponentSiteComponents" yaml:"ComponentSiteComponents"` + ComponentTargetComponents TargetComponentSlice `boil:"ComponentTargetComponents" json:"ComponentTargetComponents" toml:"ComponentTargetComponents" yaml:"ComponentTargetComponents"` +} + +// NewStruct creates a new relationship struct +func (*componentSequenceR) NewStruct() *componentSequenceR { + return &componentSequenceR{} +} + +func (r *componentSequenceR) GetComponentComponentClasses() ComponentClassSlice { + if r == nil { + return nil + } + return r.ComponentComponentClasses +} + +func (r *componentSequenceR) GetComponentComponentDomains() ComponentDomainSlice { + if r == nil { + return nil + } + return r.ComponentComponentDomains +} + +func (r *componentSequenceR) GetComponentComponentGos() ComponentGoSlice { + if r == nil { + return nil + } + return r.ComponentComponentGos +} + +func (r *componentSequenceR) GetComponentComponentSynonyms() ComponentSynonymSlice { + if r == nil { + return nil + } + return r.ComponentComponentSynonyms +} + +func (r *componentSequenceR) GetComponentSiteComponents() SiteComponentSlice { + if r == nil { + return nil + } + return 
r.ComponentSiteComponents +} + +func (r *componentSequenceR) GetComponentTargetComponents() TargetComponentSlice { + if r == nil { + return nil + } + return r.ComponentTargetComponents +} + +// componentSequenceL is where Load methods for each relationship are stored. +type componentSequenceL struct{} + +var ( + componentSequenceAllColumns = []string{"component_id", "component_type", "accession", "sequence", "sequence_md5sum", "description", "tax_id", "organism", "db_source", "db_version"} + componentSequenceColumnsWithoutDefault = []string{"component_id"} + componentSequenceColumnsWithDefault = []string{"component_type", "accession", "sequence", "sequence_md5sum", "description", "tax_id", "organism", "db_source", "db_version"} + componentSequencePrimaryKeyColumns = []string{"component_id"} + componentSequenceGeneratedColumns = []string{} +) + +type ( + // ComponentSequenceSlice is an alias for a slice of pointers to ComponentSequence. + // This should almost always be used instead of []ComponentSequence. 
+ ComponentSequenceSlice []*ComponentSequence + // ComponentSequenceHook is the signature for custom ComponentSequence hook methods + ComponentSequenceHook func(context.Context, boil.ContextExecutor, *ComponentSequence) error + + componentSequenceQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + componentSequenceType = reflect.TypeOf(&ComponentSequence{}) + componentSequenceMapping = queries.MakeStructMapping(componentSequenceType) + componentSequencePrimaryKeyMapping, _ = queries.BindMapping(componentSequenceType, componentSequenceMapping, componentSequencePrimaryKeyColumns) + componentSequenceInsertCacheMut sync.RWMutex + componentSequenceInsertCache = make(map[string]insertCache) + componentSequenceUpdateCacheMut sync.RWMutex + componentSequenceUpdateCache = make(map[string]updateCache) + componentSequenceUpsertCacheMut sync.RWMutex + componentSequenceUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var componentSequenceAfterSelectHooks []ComponentSequenceHook + +var componentSequenceBeforeInsertHooks []ComponentSequenceHook +var componentSequenceAfterInsertHooks []ComponentSequenceHook + +var componentSequenceBeforeUpdateHooks []ComponentSequenceHook +var componentSequenceAfterUpdateHooks []ComponentSequenceHook + +var componentSequenceBeforeDeleteHooks []ComponentSequenceHook +var componentSequenceAfterDeleteHooks []ComponentSequenceHook + +var componentSequenceBeforeUpsertHooks []ComponentSequenceHook +var componentSequenceAfterUpsertHooks []ComponentSequenceHook + +// doAfterSelectHooks executes all "after Select" hooks. 
func (o *ComponentSequence) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// All hook runners honor boil.SkipHooks via the context.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentSequenceAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *ComponentSequence) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentSequenceBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *ComponentSequence) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentSequenceAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *ComponentSequence) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentSequenceBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *ComponentSequence) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentSequenceAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *ComponentSequence) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// All hook runners honor boil.SkipHooks via the context.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentSequenceBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *ComponentSequence) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentSequenceAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *ComponentSequence) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentSequenceBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *ComponentSequence) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range componentSequenceAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddComponentSequenceHook registers your hook function for all future operations.
func AddComponentSequenceHook(hookPoint boil.HookPoint, componentSequenceHook ComponentSequenceHook) {
	// Appends to the package-level hook slices consumed by the doXxxHooks
	// runners; registration is additive and cannot be undone.
	switch hookPoint {
	case boil.AfterSelectHook:
		componentSequenceAfterSelectHooks = append(componentSequenceAfterSelectHooks, componentSequenceHook)
	case boil.BeforeInsertHook:
		componentSequenceBeforeInsertHooks = append(componentSequenceBeforeInsertHooks, componentSequenceHook)
	case boil.AfterInsertHook:
		componentSequenceAfterInsertHooks = append(componentSequenceAfterInsertHooks, componentSequenceHook)
	case boil.BeforeUpdateHook:
		componentSequenceBeforeUpdateHooks = append(componentSequenceBeforeUpdateHooks, componentSequenceHook)
	case boil.AfterUpdateHook:
		componentSequenceAfterUpdateHooks = append(componentSequenceAfterUpdateHooks, componentSequenceHook)
	case boil.BeforeDeleteHook:
		componentSequenceBeforeDeleteHooks = append(componentSequenceBeforeDeleteHooks, componentSequenceHook)
	case boil.AfterDeleteHook:
		componentSequenceAfterDeleteHooks = append(componentSequenceAfterDeleteHooks, componentSequenceHook)
	case boil.BeforeUpsertHook:
		componentSequenceBeforeUpsertHooks = append(componentSequenceBeforeUpsertHooks, componentSequenceHook)
	case boil.AfterUpsertHook:
		componentSequenceAfterUpsertHooks = append(componentSequenceAfterUpsertHooks, componentSequenceHook)
	}
}

// One returns a single componentSequence record from the query.
func (q componentSequenceQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ComponentSequence, error) {
	o := &ComponentSequence{}

	// Force LIMIT 1 regardless of what the caller's query mods specified.
	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		// Surface sql.ErrNoRows unwrapped so callers can test for it directly.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for component_sequences")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all ComponentSequence records from the query.
+func (q componentSequenceQuery) All(ctx context.Context, exec boil.ContextExecutor) (ComponentSequenceSlice, error) { + var o []*ComponentSequence + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ComponentSequence slice") + } + + if len(componentSequenceAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ComponentSequence records in the query. +func (q componentSequenceQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count component_sequences rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q componentSequenceQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if component_sequences exists") + } + + return count > 0, nil +} + +// ComponentComponentClasses retrieves all the component_class's ComponentClasses with an executor via component_id column. +func (o *ComponentSequence) ComponentComponentClasses(mods ...qm.QueryMod) componentClassQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"component_class\".\"component_id\"=?", o.ComponentID), + ) + + return ComponentClasses(queryMods...) 
+} + +// ComponentComponentDomains retrieves all the component_domain's ComponentDomains with an executor via component_id column. +func (o *ComponentSequence) ComponentComponentDomains(mods ...qm.QueryMod) componentDomainQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"component_domains\".\"component_id\"=?", o.ComponentID), + ) + + return ComponentDomains(queryMods...) +} + +// ComponentComponentGos retrieves all the component_go's ComponentGos with an executor via component_id column. +func (o *ComponentSequence) ComponentComponentGos(mods ...qm.QueryMod) componentGoQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"component_go\".\"component_id\"=?", o.ComponentID), + ) + + return ComponentGos(queryMods...) +} + +// ComponentComponentSynonyms retrieves all the component_synonym's ComponentSynonyms with an executor via component_id column. +func (o *ComponentSequence) ComponentComponentSynonyms(mods ...qm.QueryMod) componentSynonymQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"component_synonyms\".\"component_id\"=?", o.ComponentID), + ) + + return ComponentSynonyms(queryMods...) +} + +// ComponentSiteComponents retrieves all the site_component's SiteComponents with an executor via component_id column. +func (o *ComponentSequence) ComponentSiteComponents(mods ...qm.QueryMod) siteComponentQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"site_components\".\"component_id\"=?", o.ComponentID), + ) + + return SiteComponents(queryMods...) 
+} + +// ComponentTargetComponents retrieves all the target_component's TargetComponents with an executor via component_id column. +func (o *ComponentSequence) ComponentTargetComponents(mods ...qm.QueryMod) targetComponentQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"target_components\".\"component_id\"=?", o.ComponentID), + ) + + return TargetComponents(queryMods...) +} + +// LoadComponentComponentClasses allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (componentSequenceL) LoadComponentComponentClasses(ctx context.Context, e boil.ContextExecutor, singular bool, maybeComponentSequence interface{}, mods queries.Applicator) error { + var slice []*ComponentSequence + var object *ComponentSequence + + if singular { + object = maybeComponentSequence.(*ComponentSequence) + } else { + slice = *maybeComponentSequence.(*[]*ComponentSequence) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &componentSequenceR{} + } + args = append(args, object.ComponentID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &componentSequenceR{} + } + + for _, a := range args { + if a == obj.ComponentID { + continue Outer + } + } + + args = append(args, obj.ComponentID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`component_class`), + qm.WhereIn(`component_class.component_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load component_class") + } + + var resultSlice []*ComponentClass + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice component_class") + } + + if err = results.Close(); err != nil { + 
return errors.Wrap(err, "failed to close results in eager load on component_class") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for component_class") + } + + if len(componentClassAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.ComponentComponentClasses = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &componentClassR{} + } + foreign.R.Component = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.ComponentID == foreign.ComponentID { + local.R.ComponentComponentClasses = append(local.R.ComponentComponentClasses, foreign) + if foreign.R == nil { + foreign.R = &componentClassR{} + } + foreign.R.Component = local + break + } + } + } + + return nil +} + +// LoadComponentComponentDomains allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
func (componentSequenceL) LoadComponentComponentDomains(ctx context.Context, e boil.ContextExecutor, singular bool, maybeComponentSequence interface{}, mods queries.Applicator) error {
	var slice []*ComponentSequence
	var object *ComponentSequence

	if singular {
		object = maybeComponentSequence.(*ComponentSequence)
	} else {
		slice = *maybeComponentSequence.(*[]*ComponentSequence)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &componentSequenceR{}
		}
		args = append(args, object.ComponentID)
	} else {
	Outer:
		// Deduplicate component IDs before building the IN clause.
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &componentSequenceR{}
			}

			for _, a := range args {
				if a == obj.ComponentID {
					continue Outer
				}
			}

			args = append(args, obj.ComponentID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`component_domains`),
		qm.WhereIn(`component_domains.component_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load component_domains")
	}

	var resultSlice []*ComponentDomain
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice component_domains")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on component_domains")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for component_domains")
	}

	if len(componentDomainAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.ComponentComponentDomains = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &componentDomainR{}
			}
			foreign.R.Component = object
		}
		return nil
	}

	// Attach each loaded row to its parent and set the back-reference.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.ComponentID == foreign.ComponentID {
				local.R.ComponentComponentDomains = append(local.R.ComponentComponentDomains, foreign)
				if foreign.R == nil {
					foreign.R = &componentDomainR{}
				}
				foreign.R.Component = local
				break
			}
		}
	}

	return nil
}

// LoadComponentComponentGos allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (componentSequenceL) LoadComponentComponentGos(ctx context.Context, e boil.ContextExecutor, singular bool, maybeComponentSequence interface{}, mods queries.Applicator) error {
	var slice []*ComponentSequence
	var object *ComponentSequence

	if singular {
		object = maybeComponentSequence.(*ComponentSequence)
	} else {
		slice = *maybeComponentSequence.(*[]*ComponentSequence)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &componentSequenceR{}
		}
		args = append(args, object.ComponentID)
	} else {
	Outer:
		// Deduplicate component IDs before building the IN clause.
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &componentSequenceR{}
			}

			for _, a := range args {
				if a == obj.ComponentID {
					continue Outer
				}
			}

			args = append(args, obj.ComponentID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`component_go`),
		qm.WhereIn(`component_go.component_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load component_go")
	}

	var resultSlice []*ComponentGo
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice component_go")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on component_go")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for component_go")
	}

	if len(componentGoAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.ComponentComponentGos = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &componentGoR{}
			}
			foreign.R.Component = object
		}
		return nil
	}

	// Attach each loaded row to its parent and set the back-reference.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.ComponentID == foreign.ComponentID {
				local.R.ComponentComponentGos = append(local.R.ComponentComponentGos, foreign)
				if foreign.R == nil {
					foreign.R = &componentGoR{}
				}
				foreign.R.Component = local
				break
			}
		}
	}

	return nil
}

// LoadComponentComponentSynonyms allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (componentSequenceL) LoadComponentComponentSynonyms(ctx context.Context, e boil.ContextExecutor, singular bool, maybeComponentSequence interface{}, mods queries.Applicator) error {
	var slice []*ComponentSequence
	var object *ComponentSequence

	if singular {
		object = maybeComponentSequence.(*ComponentSequence)
	} else {
		slice = *maybeComponentSequence.(*[]*ComponentSequence)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &componentSequenceR{}
		}
		args = append(args, object.ComponentID)
	} else {
	Outer:
		// Deduplicate component IDs before building the IN clause.
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &componentSequenceR{}
			}

			for _, a := range args {
				if a == obj.ComponentID {
					continue Outer
				}
			}

			args = append(args, obj.ComponentID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`component_synonyms`),
		qm.WhereIn(`component_synonyms.component_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load component_synonyms")
	}

	var resultSlice []*ComponentSynonym
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice component_synonyms")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on component_synonyms")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for component_synonyms")
	}

	if len(componentSynonymAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.ComponentComponentSynonyms = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &componentSynonymR{}
			}
			foreign.R.Component = object
		}
		return nil
	}

	// Attach each loaded row to its parent and set the back-reference.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.ComponentID == foreign.ComponentID {
				local.R.ComponentComponentSynonyms = append(local.R.ComponentComponentSynonyms, foreign)
				if foreign.R == nil {
					foreign.R = &componentSynonymR{}
				}
				foreign.R.Component = local
				break
			}
		}
	}

	return nil
}

// LoadComponentSiteComponents allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (componentSequenceL) LoadComponentSiteComponents(ctx context.Context, e boil.ContextExecutor, singular bool, maybeComponentSequence interface{}, mods queries.Applicator) error {
	var slice []*ComponentSequence
	var object *ComponentSequence

	if singular {
		object = maybeComponentSequence.(*ComponentSequence)
	} else {
		slice = *maybeComponentSequence.(*[]*ComponentSequence)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &componentSequenceR{}
		}
		args = append(args, object.ComponentID)
	} else {
	Outer:
		// Deduplicate component IDs before building the IN clause.
		// queries.Equal is used (instead of ==) because site_components'
		// component_id is a nullable column type.
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &componentSequenceR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.ComponentID) {
					continue Outer
				}
			}

			args = append(args, obj.ComponentID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`site_components`),
		qm.WhereIn(`site_components.component_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load site_components")
	}

	var resultSlice []*SiteComponent
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice site_components")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on site_components")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for site_components")
	}

	if len(siteComponentAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.ComponentSiteComponents = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &siteComponentR{}
			}
			foreign.R.Component = object
		}
		return nil
	}

	// Attach each loaded row to its parent (nullable FK compare) and set
	// the back-reference.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.ComponentID, foreign.ComponentID) {
				local.R.ComponentSiteComponents = append(local.R.ComponentSiteComponents, foreign)
				if foreign.R == nil {
					foreign.R = &siteComponentR{}
				}
				foreign.R.Component = local
				break
			}
		}
	}

	return nil
}

// LoadComponentTargetComponents allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (componentSequenceL) LoadComponentTargetComponents(ctx context.Context, e boil.ContextExecutor, singular bool, maybeComponentSequence interface{}, mods queries.Applicator) error {
	var slice []*ComponentSequence
	var object *ComponentSequence

	if singular {
		object = maybeComponentSequence.(*ComponentSequence)
	} else {
		slice = *maybeComponentSequence.(*[]*ComponentSequence)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &componentSequenceR{}
		}
		args = append(args, object.ComponentID)
	} else {
	Outer:
		// Deduplicate component IDs before building the IN clause.
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &componentSequenceR{}
			}

			for _, a := range args {
				if a == obj.ComponentID {
					continue Outer
				}
			}

			args = append(args, obj.ComponentID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`target_components`),
		qm.WhereIn(`target_components.component_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load target_components")
	}

	var resultSlice []*TargetComponent
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice target_components")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on target_components")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for target_components")
	}

	if len(targetComponentAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.ComponentTargetComponents = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &targetComponentR{}
			}
			foreign.R.Component = object
		}
		return nil
	}

	// Attach each loaded row to its parent and set the back-reference.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.ComponentID == foreign.ComponentID {
				local.R.ComponentTargetComponents = append(local.R.ComponentTargetComponents, foreign)
				if foreign.R == nil {
					foreign.R = &targetComponentR{}
				}
				foreign.R.Component = local
				break
			}
		}
	}

	return nil
}

// AddComponentComponentClasses adds the given related objects to the existing relationships
// of the component_sequence, optionally inserting them as new records.
// Appends related to o.R.ComponentComponentClasses.
// Sets related.R.Component appropriately.
func (o *ComponentSequence) AddComponentComponentClasses(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ComponentClass) error {
	var err error
	for _, rel := range related {
		if insert {
			// New row: set the FK first, then insert.
			rel.ComponentID = o.ComponentID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: repoint its component_id at this sequence.
			updateQuery := fmt.Sprintf(
				"UPDATE \"component_class\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"component_id"}),
				strmangle.WhereClause("\"", "\"", 0, componentClassPrimaryKeyColumns),
			)
			values := []interface{}{o.ComponentID, rel.CompClassID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.ComponentID = o.ComponentID
		}
	}

	// Update the in-memory relationship caches on both sides.
	if o.R == nil {
		o.R = &componentSequenceR{
			ComponentComponentClasses: related,
		}
	} else {
		o.R.ComponentComponentClasses = append(o.R.ComponentComponentClasses, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &componentClassR{
				Component: o,
			}
		} else {
			rel.R.Component = o
		}
	}
	return nil
}

// AddComponentComponentDomains adds the given related objects to the existing relationships
// of the component_sequence, optionally inserting them as new records.
// Appends related to o.R.ComponentComponentDomains.
// Sets related.R.Component appropriately.
func (o *ComponentSequence) AddComponentComponentDomains(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ComponentDomain) error {
	var err error
	for _, rel := range related {
		if insert {
			rel.ComponentID = o.ComponentID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: repoint its component_id at this sequence.
			updateQuery := fmt.Sprintf(
				"UPDATE \"component_domains\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"component_id"}),
				strmangle.WhereClause("\"", "\"", 0, componentDomainPrimaryKeyColumns),
			)
			values := []interface{}{o.ComponentID, rel.CompdID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.ComponentID = o.ComponentID
		}
	}

	// Update the in-memory relationship caches on both sides.
	if o.R == nil {
		o.R = &componentSequenceR{
			ComponentComponentDomains: related,
		}
	} else {
		o.R.ComponentComponentDomains = append(o.R.ComponentComponentDomains, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &componentDomainR{
				Component: o,
			}
		} else {
			rel.R.Component = o
		}
	}
	return nil
}

// AddComponentComponentGos adds the given related objects to the existing relationships
// of the component_sequence, optionally inserting them as new records.
// Appends related to o.R.ComponentComponentGos.
// Sets related.R.Component appropriately.
func (o *ComponentSequence) AddComponentComponentGos(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ComponentGo) error {
	var err error
	for _, rel := range related {
		if insert {
			rel.ComponentID = o.ComponentID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: repoint its component_id at this sequence.
			updateQuery := fmt.Sprintf(
				"UPDATE \"component_go\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"component_id"}),
				strmangle.WhereClause("\"", "\"", 0, componentGoPrimaryKeyColumns),
			)
			values := []interface{}{o.ComponentID, rel.CompGoID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.ComponentID = o.ComponentID
		}
	}

	// Update the in-memory relationship caches on both sides.
	if o.R == nil {
		o.R = &componentSequenceR{
			ComponentComponentGos: related,
		}
	} else {
		o.R.ComponentComponentGos = append(o.R.ComponentComponentGos, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &componentGoR{
				Component: o,
			}
		} else {
			rel.R.Component = o
		}
	}
	return nil
}

// AddComponentComponentSynonyms adds the given related objects to the existing relationships
// of the component_sequence, optionally inserting them as new records.
// Appends related to o.R.ComponentComponentSynonyms.
// Sets related.R.Component appropriately.
func (o *ComponentSequence) AddComponentComponentSynonyms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ComponentSynonym) error {
	var err error
	for _, rel := range related {
		if insert {
			rel.ComponentID = o.ComponentID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: repoint its component_id at this sequence.
			updateQuery := fmt.Sprintf(
				"UPDATE \"component_synonyms\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"component_id"}),
				strmangle.WhereClause("\"", "\"", 0, componentSynonymPrimaryKeyColumns),
			)
			values := []interface{}{o.ComponentID, rel.CompsynID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.ComponentID = o.ComponentID
		}
	}

	// Update the in-memory relationship caches on both sides.
	if o.R == nil {
		o.R = &componentSequenceR{
			ComponentComponentSynonyms: related,
		}
	} else {
		o.R.ComponentComponentSynonyms = append(o.R.ComponentComponentSynonyms, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &componentSynonymR{
				Component: o,
			}
		} else {
			rel.R.Component = o
		}
	}
	return nil
}

// AddComponentSiteComponents adds the given related objects to the existing relationships
// of the component_sequence, optionally inserting them as new records.
// Appends related to o.R.ComponentSiteComponents.
// Sets related.R.Component appropriately.
func (o *ComponentSequence) AddComponentSiteComponents(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*SiteComponent) error {
	var err error
	for _, rel := range related {
		if insert {
			// queries.Assign is used because site_components' component_id
			// is a nullable column type.
			queries.Assign(&rel.ComponentID, o.ComponentID)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: repoint its component_id at this sequence.
			updateQuery := fmt.Sprintf(
				"UPDATE \"site_components\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"component_id"}),
				strmangle.WhereClause("\"", "\"", 0, siteComponentPrimaryKeyColumns),
			)
			values := []interface{}{o.ComponentID, rel.SitecompID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.ComponentID, o.ComponentID)
		}
	}

	// Update the in-memory relationship caches on both sides.
	if o.R == nil {
		o.R = &componentSequenceR{
			ComponentSiteComponents: related,
		}
	} else {
		o.R.ComponentSiteComponents = append(o.R.ComponentSiteComponents, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &siteComponentR{
				Component: o,
			}
		} else {
			rel.R.Component = o
		}
	}
	return nil
}

// SetComponentSiteComponents removes all previously related items of the
// component_sequence replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.Component's ComponentSiteComponents accordingly.
// Replaces o.R.ComponentSiteComponents with related.
// Sets related.R.Component's ComponentSiteComponents accordingly.
func (o *ComponentSequence) SetComponentSiteComponents(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*SiteComponent) error {
	// Null out the FK on every currently-related row first; the relation
	// is nullable so the children survive the detach.
	query := "update \"site_components\" set \"component_id\" = null where \"component_id\" = ?"
	values := []interface{}{o.ComponentID}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	// Mirror the detach in the in-memory caches.
	if o.R != nil {
		for _, rel := range o.R.ComponentSiteComponents {
			queries.SetScanner(&rel.ComponentID, nil)
			if rel.R == nil {
				continue
			}

			rel.R.Component = nil
		}
		o.R.ComponentSiteComponents = nil
	}

	return o.AddComponentSiteComponents(ctx, exec, insert, related...)
}

// RemoveComponentSiteComponents relationships from objects passed in.
// Removes related items from R.ComponentSiteComponents (uses pointer comparison, removal does not keep order)
// Sets related.R.Component.
func (o *ComponentSequence) RemoveComponentSiteComponents(ctx context.Context, exec boil.ContextExecutor, related ...*SiteComponent) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		queries.SetScanner(&rel.ComponentID, nil)
		if rel.R != nil {
			rel.R.Component = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("component_id")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	for _, rel := range related {
		for i, ri := range o.R.ComponentSiteComponents {
			if rel != ri {
				continue
			}

			// Swap-delete: move the last element into the removed slot;
			// the cached slice's order is not preserved.
			ln := len(o.R.ComponentSiteComponents)
			if ln > 1 && i < ln-1 {
				o.R.ComponentSiteComponents[i] = o.R.ComponentSiteComponents[ln-1]
			}
			o.R.ComponentSiteComponents = o.R.ComponentSiteComponents[:ln-1]
			break
		}
	}

	return nil
}

// AddComponentTargetComponents adds the given related objects to the existing relationships
// of the component_sequence, optionally inserting them as new records.
// Appends related to o.R.ComponentTargetComponents.
// Sets related.R.Component appropriately.
func (o *ComponentSequence) AddComponentTargetComponents(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*TargetComponent) error {
	var err error
	for _, rel := range related {
		if insert {
			rel.ComponentID = o.ComponentID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: repoint its component_id at this sequence.
			updateQuery := fmt.Sprintf(
				"UPDATE \"target_components\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"component_id"}),
				strmangle.WhereClause("\"", "\"", 0, targetComponentPrimaryKeyColumns),
			)
			values := []interface{}{o.ComponentID, rel.TargcompID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.ComponentID = o.ComponentID
		}
	}

	// Update the in-memory relationship caches on both sides.
	if o.R == nil {
		o.R = &componentSequenceR{
			ComponentTargetComponents: related,
		}
	} else {
		o.R.ComponentTargetComponents = append(o.R.ComponentTargetComponents, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &targetComponentR{
				Component: o,
			}
		} else {
			rel.R.Component = o
		}
	}
	return nil
}

// ComponentSequences retrieves all the records using an executor.
func ComponentSequences(mods ...qm.QueryMod) componentSequenceQuery {
	mods = append(mods, qm.From("\"component_sequences\""))
	q := NewQuery(mods...)
	// Default the select list to every column of this table unless the
	// caller's mods already set one.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"component_sequences\".*"})
	}

	return componentSequenceQuery{q}
}

// FindComponentSequence retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindComponentSequence(ctx context.Context, exec boil.ContextExecutor, componentID int64, selectCols ...string) (*ComponentSequence, error) {
	componentSequenceObj := &ComponentSequence{}

	sel := "*"
	if len(selectCols) > 0 {
		// Quote the requested columns with the dialect's identifier quotes.
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"component_sequences\" where \"component_id\"=?", sel,
	)

	q := queries.Raw(query, componentID)

	err := q.Bind(ctx, exec, componentSequenceObj)
	if err != nil {
		// Preserve sql.ErrNoRows unwrapped so callers can test for it.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from component_sequences")
	}

	if err = componentSequenceObj.doAfterSelectHooks(ctx, exec); err != nil {
		return componentSequenceObj, err
	}

	return componentSequenceObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *ComponentSequence) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no component_sequences provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(componentSequenceColumnsWithDefault, o)

	// Built INSERT statements are cached per column set so repeated inserts
	// of the same shape skip SQL construction.
	key := makeCacheKey(columns, nzDefaults)
	componentSequenceInsertCacheMut.RLock()
	cache, cached := componentSequenceInsertCache[key]
	componentSequenceInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			componentSequenceAllColumns,
			componentSequenceColumnsWithDefault,
			componentSequenceColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(componentSequenceType, componentSequenceMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(componentSequenceType, componentSequenceMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"component_sequences\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"component_sequences\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			// RETURNING clause pulls DB-computed default columns back into o.
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into component_sequences")
	}

	if !cached {
		componentSequenceInsertCacheMut.Lock()
		componentSequenceInsertCache[key] = cache
		componentSequenceInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the ComponentSequence.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *ComponentSequence) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	// Built UPDATE statements are cached per column set.
	key := makeCacheKey(columns, nil)
	componentSequenceUpdateCacheMut.RLock()
	cache, cached := componentSequenceUpdateCache[key]
	componentSequenceUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			componentSequenceAllColumns,
			componentSequencePrimaryKeyColumns,
		)

		if !columns.IsWhitelist() {
			// created_at is write-once; drop it unless explicitly whitelisted.
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update component_sequences, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"component_sequences\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, componentSequencePrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(componentSequenceType, componentSequenceMapping, append(wl, componentSequencePrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update component_sequences row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for component_sequences")
	}

	if !cached {
		componentSequenceUpdateCacheMut.Lock()
		componentSequenceUpdateCache[key] = cache
		componentSequenceUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q componentSequenceQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for component_sequences")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for component_sequences")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o ComponentSequenceSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	// SET arguments come first, then the primary keys for the WHERE clause.
	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentSequencePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"component_sequences\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentSequencePrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in componentSequence slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all componentSequence")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *ComponentSequence) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no component_sequences provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(componentSequenceColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	componentSequenceUpsertCacheMut.RLock()
	cache, cached := componentSequenceUpsertCache[key]
	componentSequenceUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			componentSequenceAllColumns,
			componentSequenceColumnsWithDefault,
			componentSequenceColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			componentSequenceAllColumns,
			componentSequencePrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert component_sequences, could not build update column list")
		}

		// Default the conflict target to the primary key columns.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(componentSequencePrimaryKeyColumns))
			copy(conflict, componentSequencePrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"component_sequences\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(componentSequenceType, componentSequenceMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(componentSequenceType, componentSequenceMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert component_sequences")
	}

	if !cached {
		componentSequenceUpsertCacheMut.Lock()
		componentSequenceUpsertCache[key] = cache
		componentSequenceUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single ComponentSequence record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *ComponentSequence) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no ComponentSequence provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), componentSequencePrimaryKeyMapping)
	sql := "DELETE FROM \"component_sequences\" WHERE \"component_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from component_sequences")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for component_sequences")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
+func (q componentSequenceQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no componentSequenceQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from component_sequences") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for component_sequences") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o ComponentSequenceSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(componentSequenceBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentSequencePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"component_sequences\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentSequencePrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from componentSequence slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for component_sequences") + } + + if len(componentSequenceAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ComponentSequence) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindComponentSequence(ctx, exec, o.ComponentID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *ComponentSequenceSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ComponentSequenceSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentSequencePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"component_sequences\".* FROM \"component_sequences\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentSequencePrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ComponentSequenceSlice") + } + + *o = slice + + return nil +} + +// ComponentSequenceExists checks if the ComponentSequence row exists. 
+func ComponentSequenceExists(ctx context.Context, exec boil.ContextExecutor, componentID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"component_sequences\" where \"component_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, componentID) + } + row := exec.QueryRowContext(ctx, sql, componentID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if component_sequences exists") + } + + return exists, nil +} diff --git a/models/component_synonyms.go b/models/component_synonyms.go new file mode 100644 index 0000000..8310f67 --- /dev/null +++ b/models/component_synonyms.go @@ -0,0 +1,1077 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// ComponentSynonym is an object representing the database table. 
+type ComponentSynonym struct { + CompsynID int64 `boil:"compsyn_id" json:"compsyn_id" toml:"compsyn_id" yaml:"compsyn_id"` + ComponentID int64 `boil:"component_id" json:"component_id" toml:"component_id" yaml:"component_id"` + ComponentSynonym null.String `boil:"component_synonym" json:"component_synonym,omitempty" toml:"component_synonym" yaml:"component_synonym,omitempty"` + SynType null.String `boil:"syn_type" json:"syn_type,omitempty" toml:"syn_type" yaml:"syn_type,omitempty"` + + R *componentSynonymR `boil:"-" json:"-" toml:"-" yaml:"-"` + L componentSynonymL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ComponentSynonymColumns = struct { + CompsynID string + ComponentID string + ComponentSynonym string + SynType string +}{ + CompsynID: "compsyn_id", + ComponentID: "component_id", + ComponentSynonym: "component_synonym", + SynType: "syn_type", +} + +var ComponentSynonymTableColumns = struct { + CompsynID string + ComponentID string + ComponentSynonym string + SynType string +}{ + CompsynID: "component_synonyms.compsyn_id", + ComponentID: "component_synonyms.component_id", + ComponentSynonym: "component_synonyms.component_synonym", + SynType: "component_synonyms.syn_type", +} + +// Generated where + +var ComponentSynonymWhere = struct { + CompsynID whereHelperint64 + ComponentID whereHelperint64 + ComponentSynonym whereHelpernull_String + SynType whereHelpernull_String +}{ + CompsynID: whereHelperint64{field: "\"component_synonyms\".\"compsyn_id\""}, + ComponentID: whereHelperint64{field: "\"component_synonyms\".\"component_id\""}, + ComponentSynonym: whereHelpernull_String{field: "\"component_synonyms\".\"component_synonym\""}, + SynType: whereHelpernull_String{field: "\"component_synonyms\".\"syn_type\""}, +} + +// ComponentSynonymRels is where relationship names are stored. +var ComponentSynonymRels = struct { + Component string +}{ + Component: "Component", +} + +// componentSynonymR is where relationships are stored. 
+type componentSynonymR struct { + Component *ComponentSequence `boil:"Component" json:"Component" toml:"Component" yaml:"Component"` +} + +// NewStruct creates a new relationship struct +func (*componentSynonymR) NewStruct() *componentSynonymR { + return &componentSynonymR{} +} + +func (r *componentSynonymR) GetComponent() *ComponentSequence { + if r == nil { + return nil + } + return r.Component +} + +// componentSynonymL is where Load methods for each relationship are stored. +type componentSynonymL struct{} + +var ( + componentSynonymAllColumns = []string{"compsyn_id", "component_id", "component_synonym", "syn_type"} + componentSynonymColumnsWithoutDefault = []string{"compsyn_id", "component_id"} + componentSynonymColumnsWithDefault = []string{"component_synonym", "syn_type"} + componentSynonymPrimaryKeyColumns = []string{"compsyn_id"} + componentSynonymGeneratedColumns = []string{} +) + +type ( + // ComponentSynonymSlice is an alias for a slice of pointers to ComponentSynonym. + // This should almost always be used instead of []ComponentSynonym. 
+ ComponentSynonymSlice []*ComponentSynonym + // ComponentSynonymHook is the signature for custom ComponentSynonym hook methods + ComponentSynonymHook func(context.Context, boil.ContextExecutor, *ComponentSynonym) error + + componentSynonymQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + componentSynonymType = reflect.TypeOf(&ComponentSynonym{}) + componentSynonymMapping = queries.MakeStructMapping(componentSynonymType) + componentSynonymPrimaryKeyMapping, _ = queries.BindMapping(componentSynonymType, componentSynonymMapping, componentSynonymPrimaryKeyColumns) + componentSynonymInsertCacheMut sync.RWMutex + componentSynonymInsertCache = make(map[string]insertCache) + componentSynonymUpdateCacheMut sync.RWMutex + componentSynonymUpdateCache = make(map[string]updateCache) + componentSynonymUpsertCacheMut sync.RWMutex + componentSynonymUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var componentSynonymAfterSelectHooks []ComponentSynonymHook + +var componentSynonymBeforeInsertHooks []ComponentSynonymHook +var componentSynonymAfterInsertHooks []ComponentSynonymHook + +var componentSynonymBeforeUpdateHooks []ComponentSynonymHook +var componentSynonymAfterUpdateHooks []ComponentSynonymHook + +var componentSynonymBeforeDeleteHooks []ComponentSynonymHook +var componentSynonymAfterDeleteHooks []ComponentSynonymHook + +var componentSynonymBeforeUpsertHooks []ComponentSynonymHook +var componentSynonymAfterUpsertHooks []ComponentSynonymHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *ComponentSynonym) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentSynonymAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *ComponentSynonym) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentSynonymBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *ComponentSynonym) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentSynonymAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *ComponentSynonym) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentSynonymBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ComponentSynonym) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentSynonymAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *ComponentSynonym) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentSynonymBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *ComponentSynonym) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentSynonymAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ComponentSynonym) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentSynonymBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *ComponentSynonym) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range componentSynonymAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddComponentSynonymHook registers your hook function for all future operations. 
+func AddComponentSynonymHook(hookPoint boil.HookPoint, componentSynonymHook ComponentSynonymHook) { + switch hookPoint { + case boil.AfterSelectHook: + componentSynonymAfterSelectHooks = append(componentSynonymAfterSelectHooks, componentSynonymHook) + case boil.BeforeInsertHook: + componentSynonymBeforeInsertHooks = append(componentSynonymBeforeInsertHooks, componentSynonymHook) + case boil.AfterInsertHook: + componentSynonymAfterInsertHooks = append(componentSynonymAfterInsertHooks, componentSynonymHook) + case boil.BeforeUpdateHook: + componentSynonymBeforeUpdateHooks = append(componentSynonymBeforeUpdateHooks, componentSynonymHook) + case boil.AfterUpdateHook: + componentSynonymAfterUpdateHooks = append(componentSynonymAfterUpdateHooks, componentSynonymHook) + case boil.BeforeDeleteHook: + componentSynonymBeforeDeleteHooks = append(componentSynonymBeforeDeleteHooks, componentSynonymHook) + case boil.AfterDeleteHook: + componentSynonymAfterDeleteHooks = append(componentSynonymAfterDeleteHooks, componentSynonymHook) + case boil.BeforeUpsertHook: + componentSynonymBeforeUpsertHooks = append(componentSynonymBeforeUpsertHooks, componentSynonymHook) + case boil.AfterUpsertHook: + componentSynonymAfterUpsertHooks = append(componentSynonymAfterUpsertHooks, componentSynonymHook) + } +} + +// One returns a single componentSynonym record from the query. +func (q componentSynonymQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ComponentSynonym, error) { + o := &ComponentSynonym{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for component_synonyms") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ComponentSynonym records from the query. 
+func (q componentSynonymQuery) All(ctx context.Context, exec boil.ContextExecutor) (ComponentSynonymSlice, error) { + var o []*ComponentSynonym + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ComponentSynonym slice") + } + + if len(componentSynonymAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ComponentSynonym records in the query. +func (q componentSynonymQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count component_synonyms rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q componentSynonymQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if component_synonyms exists") + } + + return count > 0, nil +} + +// Component pointed to by the foreign key. +func (o *ComponentSynonym) Component(mods ...qm.QueryMod) componentSequenceQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"component_id\" = ?", o.ComponentID), + } + + queryMods = append(queryMods, mods...) + + return ComponentSequences(queryMods...) +} + +// LoadComponent allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (componentSynonymL) LoadComponent(ctx context.Context, e boil.ContextExecutor, singular bool, maybeComponentSynonym interface{}, mods queries.Applicator) error { + var slice []*ComponentSynonym + var object *ComponentSynonym + + if singular { + object = maybeComponentSynonym.(*ComponentSynonym) + } else { + slice = *maybeComponentSynonym.(*[]*ComponentSynonym) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &componentSynonymR{} + } + args = append(args, object.ComponentID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &componentSynonymR{} + } + + for _, a := range args { + if a == obj.ComponentID { + continue Outer + } + } + + args = append(args, obj.ComponentID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`component_sequences`), + qm.WhereIn(`component_sequences.component_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ComponentSequence") + } + + var resultSlice []*ComponentSequence + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice ComponentSequence") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for component_sequences") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for component_sequences") + } + + if len(componentSynonymAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Component = foreign + if foreign.R == nil { + foreign.R = &componentSequenceR{} + } + foreign.R.ComponentComponentSynonyms = 
append(foreign.R.ComponentComponentSynonyms, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ComponentID == foreign.ComponentID { + local.R.Component = foreign + if foreign.R == nil { + foreign.R = &componentSequenceR{} + } + foreign.R.ComponentComponentSynonyms = append(foreign.R.ComponentComponentSynonyms, local) + break + } + } + } + + return nil +} + +// SetComponent of the componentSynonym to the related item. +// Sets o.R.Component to related. +// Adds o to related.R.ComponentComponentSynonyms. +func (o *ComponentSynonym) SetComponent(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ComponentSequence) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"component_synonyms\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"component_id"}), + strmangle.WhereClause("\"", "\"", 0, componentSynonymPrimaryKeyColumns), + ) + values := []interface{}{related.ComponentID, o.CompsynID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.ComponentID = related.ComponentID + if o.R == nil { + o.R = &componentSynonymR{ + Component: related, + } + } else { + o.R.Component = related + } + + if related.R == nil { + related.R = &componentSequenceR{ + ComponentComponentSynonyms: ComponentSynonymSlice{o}, + } + } else { + related.R.ComponentComponentSynonyms = append(related.R.ComponentComponentSynonyms, o) + } + + return nil +} + +// ComponentSynonyms retrieves all the records using an executor. 
+func ComponentSynonyms(mods ...qm.QueryMod) componentSynonymQuery { + mods = append(mods, qm.From("\"component_synonyms\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"component_synonyms\".*"}) + } + + return componentSynonymQuery{q} +} + +// FindComponentSynonym retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindComponentSynonym(ctx context.Context, exec boil.ContextExecutor, compsynID int64, selectCols ...string) (*ComponentSynonym, error) { + componentSynonymObj := &ComponentSynonym{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"component_synonyms\" where \"compsyn_id\"=?", sel, + ) + + q := queries.Raw(query, compsynID) + + err := q.Bind(ctx, exec, componentSynonymObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from component_synonyms") + } + + if err = componentSynonymObj.doAfterSelectHooks(ctx, exec); err != nil { + return componentSynonymObj, err + } + + return componentSynonymObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *ComponentSynonym) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no component_synonyms provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(componentSynonymColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + componentSynonymInsertCacheMut.RLock() + cache, cached := componentSynonymInsertCache[key] + componentSynonymInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + componentSynonymAllColumns, + componentSynonymColumnsWithDefault, + componentSynonymColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(componentSynonymType, componentSynonymMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(componentSynonymType, componentSynonymMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"component_synonyms\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"component_synonyms\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) 
+ } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into component_synonyms") + } + + if !cached { + componentSynonymInsertCacheMut.Lock() + componentSynonymInsertCache[key] = cache + componentSynonymInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ComponentSynonym. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *ComponentSynonym) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + componentSynonymUpdateCacheMut.RLock() + cache, cached := componentSynonymUpdateCache[key] + componentSynonymUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + componentSynonymAllColumns, + componentSynonymPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update component_synonyms, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"component_synonyms\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, componentSynonymPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(componentSynonymType, componentSynonymMapping, append(wl, componentSynonymPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var 
result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update component_synonyms row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for component_synonyms") + } + + if !cached { + componentSynonymUpdateCacheMut.Lock() + componentSynonymUpdateCache[key] = cache + componentSynonymUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q componentSynonymQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for component_synonyms") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for component_synonyms") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ComponentSynonymSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentSynonymPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"component_synonyms\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentSynonymPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in componentSynonym slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all componentSynonym") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *ComponentSynonym) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no component_synonyms provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(componentSynonymColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + 
strmangle.PutBuffer(buf) + + componentSynonymUpsertCacheMut.RLock() + cache, cached := componentSynonymUpsertCache[key] + componentSynonymUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + componentSynonymAllColumns, + componentSynonymColumnsWithDefault, + componentSynonymColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + componentSynonymAllColumns, + componentSynonymPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert component_synonyms, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(componentSynonymPrimaryKeyColumns)) + copy(conflict, componentSynonymPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"component_synonyms\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(componentSynonymType, componentSynonymMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(componentSynonymType, componentSynonymMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert component_synonyms") + } + + if !cached { + componentSynonymUpsertCacheMut.Lock() + componentSynonymUpsertCache[key] = cache + componentSynonymUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ComponentSynonym record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *ComponentSynonym) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ComponentSynonym provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), componentSynonymPrimaryKeyMapping) + sql := "DELETE FROM \"component_synonyms\" WHERE \"compsyn_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from component_synonyms") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for component_synonyms") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q componentSynonymQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no componentSynonymQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from component_synonyms") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for component_synonyms") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o ComponentSynonymSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(componentSynonymBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentSynonymPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"component_synonyms\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentSynonymPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from componentSynonym slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for component_synonyms") + } + + if len(componentSynonymAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ComponentSynonym) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindComponentSynonym(ctx, exec, o.CompsynID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *ComponentSynonymSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ComponentSynonymSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), componentSynonymPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"component_synonyms\".* FROM \"component_synonyms\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, componentSynonymPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ComponentSynonymSlice") + } + + *o = slice + + return nil +} + +// ComponentSynonymExists checks if the ComponentSynonym row exists. 
+func ComponentSynonymExists(ctx context.Context, exec boil.ContextExecutor, compsynID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"component_synonyms\" where \"compsyn_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, compsynID) + } + row := exec.QueryRowContext(ctx, sql, compsynID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if component_synonyms exists") + } + + return exists, nil +} diff --git a/models/compound.go b/models/compound.go new file mode 100644 index 0000000..706877f --- /dev/null +++ b/models/compound.go @@ -0,0 +1,1580 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Compound is an object representing the database table. 
+type Compound struct { + ID int64 `boil:"id" json:"id" toml:"id" yaml:"id"` + Accession null.String `boil:"accession" json:"accession,omitempty" toml:"accession" yaml:"accession,omitempty"` + Position null.String `boil:"position" json:"position,omitempty" toml:"position" yaml:"position,omitempty"` + Name null.String `boil:"name" json:"name,omitempty" toml:"name" yaml:"name,omitempty"` + HTMLName null.String `boil:"html_name" json:"html_name,omitempty" toml:"html_name" yaml:"html_name,omitempty"` + Formula null.String `boil:"formula" json:"formula,omitempty" toml:"formula" yaml:"formula,omitempty"` + Charge null.String `boil:"charge" json:"charge,omitempty" toml:"charge" yaml:"charge,omitempty"` + Chebi null.String `boil:"chebi" json:"chebi,omitempty" toml:"chebi" yaml:"chebi,omitempty"` + PolymerizationIndex null.String `boil:"polymerization_index" json:"polymerization_index,omitempty" toml:"polymerization_index" yaml:"polymerization_index,omitempty"` + CompoundType string `boil:"compound_type" json:"compound_type" toml:"compound_type" yaml:"compound_type"` + + R *compoundR `boil:"-" json:"-" toml:"-" yaml:"-"` + L compoundL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var CompoundColumns = struct { + ID string + Accession string + Position string + Name string + HTMLName string + Formula string + Charge string + Chebi string + PolymerizationIndex string + CompoundType string +}{ + ID: "id", + Accession: "accession", + Position: "position", + Name: "name", + HTMLName: "html_name", + Formula: "formula", + Charge: "charge", + Chebi: "chebi", + PolymerizationIndex: "polymerization_index", + CompoundType: "compound_type", +} + +var CompoundTableColumns = struct { + ID string + Accession string + Position string + Name string + HTMLName string + Formula string + Charge string + Chebi string + PolymerizationIndex string + CompoundType string +}{ + ID: "compound.id", + Accession: "compound.accession", + Position: "compound.position", + Name: "compound.name", + HTMLName: 
"compound.html_name", + Formula: "compound.formula", + Charge: "compound.charge", + Chebi: "compound.chebi", + PolymerizationIndex: "compound.polymerization_index", + CompoundType: "compound.compound_type", +} + +// Generated where + +var CompoundWhere = struct { + ID whereHelperint64 + Accession whereHelpernull_String + Position whereHelpernull_String + Name whereHelpernull_String + HTMLName whereHelpernull_String + Formula whereHelpernull_String + Charge whereHelpernull_String + Chebi whereHelpernull_String + PolymerizationIndex whereHelpernull_String + CompoundType whereHelperstring +}{ + ID: whereHelperint64{field: "\"compound\".\"id\""}, + Accession: whereHelpernull_String{field: "\"compound\".\"accession\""}, + Position: whereHelpernull_String{field: "\"compound\".\"position\""}, + Name: whereHelpernull_String{field: "\"compound\".\"name\""}, + HTMLName: whereHelpernull_String{field: "\"compound\".\"html_name\""}, + Formula: whereHelpernull_String{field: "\"compound\".\"formula\""}, + Charge: whereHelpernull_String{field: "\"compound\".\"charge\""}, + Chebi: whereHelpernull_String{field: "\"compound\".\"chebi\""}, + PolymerizationIndex: whereHelpernull_String{field: "\"compound\".\"polymerization_index\""}, + CompoundType: whereHelperstring{field: "\"compound\".\"compound_type\""}, +} + +// CompoundRels is where relationship names are stored. +var CompoundRels = struct { + CompoundChebi string + ReactionParticipants string + ReactiveParts string +}{ + CompoundChebi: "CompoundChebi", + ReactionParticipants: "ReactionParticipants", + ReactiveParts: "ReactiveParts", +} + +// compoundR is where relationships are stored. 
+type compoundR struct { + CompoundChebi *Chebi `boil:"CompoundChebi" json:"CompoundChebi" toml:"CompoundChebi" yaml:"CompoundChebi"` + ReactionParticipants ReactionParticipantSlice `boil:"ReactionParticipants" json:"ReactionParticipants" toml:"ReactionParticipants" yaml:"ReactionParticipants"` + ReactiveParts ReactivePartSlice `boil:"ReactiveParts" json:"ReactiveParts" toml:"ReactiveParts" yaml:"ReactiveParts"` +} + +// NewStruct creates a new relationship struct +func (*compoundR) NewStruct() *compoundR { + return &compoundR{} +} + +func (r *compoundR) GetCompoundChebi() *Chebi { + if r == nil { + return nil + } + return r.CompoundChebi +} + +func (r *compoundR) GetReactionParticipants() ReactionParticipantSlice { + if r == nil { + return nil + } + return r.ReactionParticipants +} + +func (r *compoundR) GetReactiveParts() ReactivePartSlice { + if r == nil { + return nil + } + return r.ReactiveParts +} + +// compoundL is where Load methods for each relationship are stored. +type compoundL struct{} + +var ( + compoundAllColumns = []string{"id", "accession", "position", "name", "html_name", "formula", "charge", "chebi", "polymerization_index", "compound_type"} + compoundColumnsWithoutDefault = []string{"id", "compound_type"} + compoundColumnsWithDefault = []string{"accession", "position", "name", "html_name", "formula", "charge", "chebi", "polymerization_index"} + compoundPrimaryKeyColumns = []string{"accession"} + compoundGeneratedColumns = []string{} +) + +type ( + // CompoundSlice is an alias for a slice of pointers to Compound. + // This should almost always be used instead of []Compound. 
+ CompoundSlice []*Compound + // CompoundHook is the signature for custom Compound hook methods + CompoundHook func(context.Context, boil.ContextExecutor, *Compound) error + + compoundQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + compoundType = reflect.TypeOf(&Compound{}) + compoundMapping = queries.MakeStructMapping(compoundType) + compoundPrimaryKeyMapping, _ = queries.BindMapping(compoundType, compoundMapping, compoundPrimaryKeyColumns) + compoundInsertCacheMut sync.RWMutex + compoundInsertCache = make(map[string]insertCache) + compoundUpdateCacheMut sync.RWMutex + compoundUpdateCache = make(map[string]updateCache) + compoundUpsertCacheMut sync.RWMutex + compoundUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var compoundAfterSelectHooks []CompoundHook + +var compoundBeforeInsertHooks []CompoundHook +var compoundAfterInsertHooks []CompoundHook + +var compoundBeforeUpdateHooks []CompoundHook +var compoundAfterUpdateHooks []CompoundHook + +var compoundBeforeDeleteHooks []CompoundHook +var compoundAfterDeleteHooks []CompoundHook + +var compoundBeforeUpsertHooks []CompoundHook +var compoundAfterUpsertHooks []CompoundHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *Compound) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. 
+func (o *Compound) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Compound) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *Compound) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Compound) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Compound) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. 
+func (o *Compound) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Compound) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *Compound) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddCompoundHook registers your hook function for all future operations. 
+func AddCompoundHook(hookPoint boil.HookPoint, compoundHook CompoundHook) { + switch hookPoint { + case boil.AfterSelectHook: + compoundAfterSelectHooks = append(compoundAfterSelectHooks, compoundHook) + case boil.BeforeInsertHook: + compoundBeforeInsertHooks = append(compoundBeforeInsertHooks, compoundHook) + case boil.AfterInsertHook: + compoundAfterInsertHooks = append(compoundAfterInsertHooks, compoundHook) + case boil.BeforeUpdateHook: + compoundBeforeUpdateHooks = append(compoundBeforeUpdateHooks, compoundHook) + case boil.AfterUpdateHook: + compoundAfterUpdateHooks = append(compoundAfterUpdateHooks, compoundHook) + case boil.BeforeDeleteHook: + compoundBeforeDeleteHooks = append(compoundBeforeDeleteHooks, compoundHook) + case boil.AfterDeleteHook: + compoundAfterDeleteHooks = append(compoundAfterDeleteHooks, compoundHook) + case boil.BeforeUpsertHook: + compoundBeforeUpsertHooks = append(compoundBeforeUpsertHooks, compoundHook) + case boil.AfterUpsertHook: + compoundAfterUpsertHooks = append(compoundAfterUpsertHooks, compoundHook) + } +} + +// One returns a single compound record from the query. +func (q compoundQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Compound, error) { + o := &Compound{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for compound") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Compound records from the query. 
+func (q compoundQuery) All(ctx context.Context, exec boil.ContextExecutor) (CompoundSlice, error) { + var o []*Compound + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to Compound slice") + } + + if len(compoundAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Compound records in the query. +func (q compoundQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count compound rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q compoundQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if compound exists") + } + + return count > 0, nil +} + +// CompoundChebi pointed to by the foreign key. +func (o *Compound) CompoundChebi(mods ...qm.QueryMod) chebiQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"accession\" = ?", o.Chebi), + } + + queryMods = append(queryMods, mods...) + + return Chebis(queryMods...) +} + +// ReactionParticipants retrieves all the reaction_participant's ReactionParticipants with an executor. +func (o *Compound) ReactionParticipants(mods ...qm.QueryMod) reactionParticipantQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) 
+ } + + queryMods = append(queryMods, + qm.Where("\"reaction_participant\".\"compound\"=?", o.Accession), + ) + + return ReactionParticipants(queryMods...) +} + +// ReactiveParts retrieves all the reactive_part's ReactiveParts with an executor. +func (o *Compound) ReactiveParts(mods ...qm.QueryMod) reactivePartQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"reactive_part\".\"compound\"=?", o.Accession), + ) + + return ReactiveParts(queryMods...) +} + +// LoadCompoundChebi allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (compoundL) LoadCompoundChebi(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompound interface{}, mods queries.Applicator) error { + var slice []*Compound + var object *Compound + + if singular { + object = maybeCompound.(*Compound) + } else { + slice = *maybeCompound.(*[]*Compound) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &compoundR{} + } + if !queries.IsNil(object.Chebi) { + args = append(args, object.Chebi) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &compoundR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Chebi) { + continue Outer + } + } + + if !queries.IsNil(obj.Chebi) { + args = append(args, obj.Chebi) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`chebi`), + qm.WhereIn(`chebi.accession in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Chebi") + } + + var resultSlice []*Chebi + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Chebi") + } + + if err = results.Close(); err != nil { + return 
errors.Wrap(err, "failed to close results of eager load for chebi") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for chebi") + } + + if len(compoundAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.CompoundChebi = foreign + if foreign.R == nil { + foreign.R = &chebiR{} + } + foreign.R.Compounds = append(foreign.R.Compounds, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.Chebi, foreign.Accession) { + local.R.CompoundChebi = foreign + if foreign.R == nil { + foreign.R = &chebiR{} + } + foreign.R.Compounds = append(foreign.R.Compounds, local) + break + } + } + } + + return nil +} + +// LoadReactionParticipants allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (compoundL) LoadReactionParticipants(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompound interface{}, mods queries.Applicator) error { + var slice []*Compound + var object *Compound + + if singular { + object = maybeCompound.(*Compound) + } else { + slice = *maybeCompound.(*[]*Compound) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &compoundR{} + } + args = append(args, object.Accession) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &compoundR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Accession) { + continue Outer + } + } + + args = append(args, obj.Accession) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`reaction_participant`), + qm.WhereIn(`reaction_participant.compound in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load reaction_participant") + } + + var resultSlice []*ReactionParticipant + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice reaction_participant") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on reaction_participant") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for reaction_participant") + } + + if len(reactionParticipantAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.ReactionParticipants = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &reactionParticipantR{} + } + foreign.R.ReactionParticipantCompound = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, 
local := range slice { + if queries.Equal(local.Accession, foreign.Compound) { + local.R.ReactionParticipants = append(local.R.ReactionParticipants, foreign) + if foreign.R == nil { + foreign.R = &reactionParticipantR{} + } + foreign.R.ReactionParticipantCompound = local + break + } + } + } + + return nil +} + +// LoadReactiveParts allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (compoundL) LoadReactiveParts(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompound interface{}, mods queries.Applicator) error { + var slice []*Compound + var object *Compound + + if singular { + object = maybeCompound.(*Compound) + } else { + slice = *maybeCompound.(*[]*Compound) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &compoundR{} + } + args = append(args, object.Accession) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &compoundR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Accession) { + continue Outer + } + } + + args = append(args, obj.Accession) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`reactive_part`), + qm.WhereIn(`reactive_part.compound in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load reactive_part") + } + + var resultSlice []*ReactivePart + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice reactive_part") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on reactive_part") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for reactive_part") + } + + if len(reactivePartAfterSelectHooks) != 0 { + for _, obj 
:= range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.ReactiveParts = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &reactivePartR{} + } + foreign.R.ReactivePartCompound = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.Accession, foreign.Compound) { + local.R.ReactiveParts = append(local.R.ReactiveParts, foreign) + if foreign.R == nil { + foreign.R = &reactivePartR{} + } + foreign.R.ReactivePartCompound = local + break + } + } + } + + return nil +} + +// SetCompoundChebi of the compound to the related item. +// Sets o.R.CompoundChebi to related. +// Adds o to related.R.Compounds. +func (o *Compound) SetCompoundChebi(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Chebi) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"compound\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"chebi"}), + strmangle.WhereClause("\"", "\"", 0, compoundPrimaryKeyColumns), + ) + values := []interface{}{related.Accession, o.Accession} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.Chebi, related.Accession) + if o.R == nil { + o.R = &compoundR{ + CompoundChebi: related, + } + } else { + o.R.CompoundChebi = related + } + + if related.R == nil { + related.R = &chebiR{ + Compounds: CompoundSlice{o}, + } + } else { + related.R.Compounds = append(related.R.Compounds, o) + } + + return nil +} + +// RemoveCompoundChebi relationship. 
+// Sets o.R.CompoundChebi to nil. +// Removes o from all passed in related items' relationships struct. +func (o *Compound) RemoveCompoundChebi(ctx context.Context, exec boil.ContextExecutor, related *Chebi) error { + var err error + + queries.SetScanner(&o.Chebi, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("chebi")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.CompoundChebi = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.Compounds { + if queries.Equal(o.Chebi, ri.Chebi) { + continue + } + + ln := len(related.R.Compounds) + if ln > 1 && i < ln-1 { + related.R.Compounds[i] = related.R.Compounds[ln-1] + } + related.R.Compounds = related.R.Compounds[:ln-1] + break + } + return nil +} + +// AddReactionParticipants adds the given related objects to the existing relationships +// of the compound, optionally inserting them as new records. +// Appends related to o.R.ReactionParticipants. +// Sets related.R.ReactionParticipantCompound appropriately. 
+func (o *Compound) AddReactionParticipants(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ReactionParticipant) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.Compound, o.Accession) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"reaction_participant\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"compound"}), + strmangle.WhereClause("\"", "\"", 0, reactionParticipantPrimaryKeyColumns), + ) + values := []interface{}{o.Accession, rel.Compound, rel.Reactionside} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.Compound, o.Accession) + } + } + + if o.R == nil { + o.R = &compoundR{ + ReactionParticipants: related, + } + } else { + o.R.ReactionParticipants = append(o.R.ReactionParticipants, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &reactionParticipantR{ + ReactionParticipantCompound: o, + } + } else { + rel.R.ReactionParticipantCompound = o + } + } + return nil +} + +// SetReactionParticipants removes all previously related items of the +// compound replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.ReactionParticipantCompound's ReactionParticipants accordingly. +// Replaces o.R.ReactionParticipants with related. +// Sets related.R.ReactionParticipantCompound's ReactionParticipants accordingly. 
func (o *Compound) SetReactionParticipants(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ReactionParticipant) error {
	// Detach every current child row in one statement by nulling its FK.
	query := "update \"reaction_participant\" set \"compound\" = null where \"compound\" = ?"
	values := []interface{}{o.Accession}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	// Mirror the detach in the cached relationship structs.
	if o.R != nil {
		for _, rel := range o.R.ReactionParticipants {
			queries.SetScanner(&rel.Compound, nil)
			if rel.R == nil {
				continue
			}

			rel.R.ReactionParticipantCompound = nil
		}
		o.R.ReactionParticipants = nil
	}

	// Re-attach the replacement set (inserting the rows first when insert is true).
	return o.AddReactionParticipants(ctx, exec, insert, related...)
}

// RemoveReactionParticipants relationships from objects passed in.
// Removes related items from R.ReactionParticipants (uses pointer comparison, removal does not keep order)
// Sets related.R.ReactionParticipantCompound.
func (o *Compound) RemoveReactionParticipants(ctx context.Context, exec boil.ContextExecutor, related ...*ReactionParticipant) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		// Null each child's FK (in memory and via a column-whitelisted UPDATE).
		queries.SetScanner(&rel.Compound, nil)
		if rel.R != nil {
			rel.R.ReactionParticipantCompound = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("compound")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	// Drop each removed child from the cached slice by pointer identity,
	// using swap-with-last — ordering is not preserved.
	for _, rel := range related {
		for i, ri := range o.R.ReactionParticipants {
			if rel != ri {
				continue
			}

			ln := len(o.R.ReactionParticipants)
			if ln > 1 && i < ln-1 {
				o.R.ReactionParticipants[i] = o.R.ReactionParticipants[ln-1]
			}
			o.R.ReactionParticipants = o.R.ReactionParticipants[:ln-1]
			break
		}
	}

	return nil
}

// AddReactiveParts adds the given related objects to the existing relationships
// of the compound, optionally inserting them as new records.
// Appends related to o.R.ReactiveParts.
// Sets related.R.ReactivePartCompound appropriately.
+func (o *Compound) AddReactiveParts(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ReactivePart) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.Compound, o.Accession) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"reactive_part\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"compound"}), + strmangle.WhereClause("\"", "\"", 0, reactivePartPrimaryKeyColumns), + ) + values := []interface{}{o.Accession, rel.Accession} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.Compound, o.Accession) + } + } + + if o.R == nil { + o.R = &compoundR{ + ReactiveParts: related, + } + } else { + o.R.ReactiveParts = append(o.R.ReactiveParts, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &reactivePartR{ + ReactivePartCompound: o, + } + } else { + rel.R.ReactivePartCompound = o + } + } + return nil +} + +// Compounds retrieves all the records using an executor. +func Compounds(mods ...qm.QueryMod) compoundQuery { + mods = append(mods, qm.From("\"compound\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"compound\".*"}) + } + + return compoundQuery{q} +} + +// FindCompound retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindCompound(ctx context.Context, exec boil.ContextExecutor, accession null.String, selectCols ...string) (*Compound, error) { + compoundObj := &Compound{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"compound\" where \"accession\"=?", sel, + ) + + q := queries.Raw(query, accession) + + err := q.Bind(ctx, exec, compoundObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from compound") + } + + if err = compoundObj.doAfterSelectHooks(ctx, exec); err != nil { + return compoundObj, err + } + + return compoundObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *Compound) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no compound provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(compoundColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + compoundInsertCacheMut.RLock() + cache, cached := compoundInsertCache[key] + compoundInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + compoundAllColumns, + compoundColumnsWithDefault, + compoundColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(compoundType, compoundMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(compoundType, compoundMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"compound\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), 
strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"compound\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into compound") + } + + if !cached { + compoundInsertCacheMut.Lock() + compoundInsertCache[key] = cache + compoundInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the Compound. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
func (o *Compound) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	// Statement text and struct-field mapping are cached per column set.
	key := makeCacheKey(columns, nil)
	compoundUpdateCacheMut.RLock()
	cache, cached := compoundUpdateCache[key]
	compoundUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			compoundAllColumns,
			compoundPrimaryKeyColumns,
		)

		// created_at is only writable via an explicit whitelist.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update compound, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"compound\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, compoundPrimaryKeyColumns),
		)
		// Values are bound as: whitelisted columns first, then PK columns
		// for the WHERE clause.
		cache.valueMapping, err = queries.BindMapping(compoundType, compoundMapping, append(wl, compoundPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update compound row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for compound")
	}

	// Publish the freshly built query/mapping for reuse.
	if !cached {
		compoundUpdateCacheMut.Lock()
		compoundUpdateCache[key] = cache
		compoundUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
+func (q compoundQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for compound") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for compound") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o CompoundSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"compound\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in compound slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all compound") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Compound) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no compound provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(compoundColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + compoundUpsertCacheMut.RLock() + cache, cached := compoundUpsertCache[key] + compoundUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + compoundAllColumns, + compoundColumnsWithDefault, + compoundColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + compoundAllColumns, + compoundPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + 
return errors.New("models: unable to upsert compound, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(compoundPrimaryKeyColumns)) + copy(conflict, compoundPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"compound\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(compoundType, compoundMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(compoundType, compoundMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert compound") + } + + if !cached { + compoundUpsertCacheMut.Lock() + compoundUpsertCache[key] = cache + compoundUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Compound record with an executor. +// Delete will match against the primary key column to find the record to delete. 
func (o *Compound) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no Compound provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	// Delete strictly by primary key (accession).
	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), compoundPrimaryKeyMapping)
	sql := "DELETE FROM \"compound\" WHERE \"accession\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from compound")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for compound")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q compoundQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no compoundQuery provided for delete all")
	}

	// Re-point the already-built query at DELETE and execute it as-is.
	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from compound")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for compound")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o CompoundSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Run before-delete hooks only when any are registered.
	if len(compoundBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// Collect every element's primary key for a single bulk DELETE.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"compound\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from compound slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for compound")
	}

	if len(compoundAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *Compound) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindCompound(ctx, exec, o.Accession)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place with the freshly fetched row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
+func (o *CompoundSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := CompoundSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"compound\".* FROM \"compound\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in CompoundSlice") + } + + *o = slice + + return nil +} + +// CompoundExists checks if the Compound row exists. +func CompoundExists(ctx context.Context, exec boil.ContextExecutor, accession null.String) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"compound\" where \"accession\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, accession) + } + row := exec.QueryRowContext(ctx, sql, accession) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if compound exists") + } + + return exists, nil +} diff --git a/models/compound_properties.go b/models/compound_properties.go new file mode 100644 index 0000000..4cb9b31 --- /dev/null +++ b/models/compound_properties.go @@ -0,0 +1,1211 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/sqlboiler/v4/types" + "github.com/volatiletech/strmangle" +) + +// CompoundProperty is an object representing the database table. +type CompoundProperty struct { + Molregno int64 `boil:"molregno" json:"molregno" toml:"molregno" yaml:"molregno"` + MWFreebase types.NullDecimal `boil:"mw_freebase" json:"mw_freebase,omitempty" toml:"mw_freebase" yaml:"mw_freebase,omitempty"` + Alogp types.NullDecimal `boil:"alogp" json:"alogp,omitempty" toml:"alogp" yaml:"alogp,omitempty"` + Hba null.Int64 `boil:"hba" json:"hba,omitempty" toml:"hba" yaml:"hba,omitempty"` + HBD null.Int64 `boil:"hbd" json:"hbd,omitempty" toml:"hbd" yaml:"hbd,omitempty"` + Psa types.NullDecimal `boil:"psa" json:"psa,omitempty" toml:"psa" yaml:"psa,omitempty"` + RTB null.Int64 `boil:"rtb" json:"rtb,omitempty" toml:"rtb" yaml:"rtb,omitempty"` + Ro3Pass null.String `boil:"ro3_pass" json:"ro3_pass,omitempty" toml:"ro3_pass" yaml:"ro3_pass,omitempty"` + NumRo5Violations null.Int16 `boil:"num_ro5_violations" json:"num_ro5_violations,omitempty" toml:"num_ro5_violations" yaml:"num_ro5_violations,omitempty"` + CXMostApka types.NullDecimal `boil:"cx_most_apka" json:"cx_most_apka,omitempty" toml:"cx_most_apka" yaml:"cx_most_apka,omitempty"` + CXMostBpka types.NullDecimal `boil:"cx_most_bpka" json:"cx_most_bpka,omitempty" toml:"cx_most_bpka" yaml:"cx_most_bpka,omitempty"` + CXLogp types.NullDecimal `boil:"cx_logp" json:"cx_logp,omitempty" toml:"cx_logp" yaml:"cx_logp,omitempty"` + CXLogd types.NullDecimal `boil:"cx_logd" json:"cx_logd,omitempty" toml:"cx_logd" yaml:"cx_logd,omitempty"` + 
MolecularSpecies null.String `boil:"molecular_species" json:"molecular_species,omitempty" toml:"molecular_species" yaml:"molecular_species,omitempty"` + FullMWT types.NullDecimal `boil:"full_mwt" json:"full_mwt,omitempty" toml:"full_mwt" yaml:"full_mwt,omitempty"` + AromaticRings null.Int64 `boil:"aromatic_rings" json:"aromatic_rings,omitempty" toml:"aromatic_rings" yaml:"aromatic_rings,omitempty"` + HeavyAtoms null.Int64 `boil:"heavy_atoms" json:"heavy_atoms,omitempty" toml:"heavy_atoms" yaml:"heavy_atoms,omitempty"` + QedWeighted types.NullDecimal `boil:"qed_weighted" json:"qed_weighted,omitempty" toml:"qed_weighted" yaml:"qed_weighted,omitempty"` + MWMonoisotopic types.NullDecimal `boil:"mw_monoisotopic" json:"mw_monoisotopic,omitempty" toml:"mw_monoisotopic" yaml:"mw_monoisotopic,omitempty"` + FullMolformula null.String `boil:"full_molformula" json:"full_molformula,omitempty" toml:"full_molformula" yaml:"full_molformula,omitempty"` + HbaLipinski null.Int64 `boil:"hba_lipinski" json:"hba_lipinski,omitempty" toml:"hba_lipinski" yaml:"hba_lipinski,omitempty"` + HBDLipinski null.Int64 `boil:"hbd_lipinski" json:"hbd_lipinski,omitempty" toml:"hbd_lipinski" yaml:"hbd_lipinski,omitempty"` + NumLipinskiRo5Violations null.Int16 `boil:"num_lipinski_ro5_violations" json:"num_lipinski_ro5_violations,omitempty" toml:"num_lipinski_ro5_violations" yaml:"num_lipinski_ro5_violations,omitempty"` + + R *compoundPropertyR `boil:"-" json:"-" toml:"-" yaml:"-"` + L compoundPropertyL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var CompoundPropertyColumns = struct { + Molregno string + MWFreebase string + Alogp string + Hba string + HBD string + Psa string + RTB string + Ro3Pass string + NumRo5Violations string + CXMostApka string + CXMostBpka string + CXLogp string + CXLogd string + MolecularSpecies string + FullMWT string + AromaticRings string + HeavyAtoms string + QedWeighted string + MWMonoisotopic string + FullMolformula string + HbaLipinski string + HBDLipinski string + 
NumLipinskiRo5Violations string +}{ + Molregno: "molregno", + MWFreebase: "mw_freebase", + Alogp: "alogp", + Hba: "hba", + HBD: "hbd", + Psa: "psa", + RTB: "rtb", + Ro3Pass: "ro3_pass", + NumRo5Violations: "num_ro5_violations", + CXMostApka: "cx_most_apka", + CXMostBpka: "cx_most_bpka", + CXLogp: "cx_logp", + CXLogd: "cx_logd", + MolecularSpecies: "molecular_species", + FullMWT: "full_mwt", + AromaticRings: "aromatic_rings", + HeavyAtoms: "heavy_atoms", + QedWeighted: "qed_weighted", + MWMonoisotopic: "mw_monoisotopic", + FullMolformula: "full_molformula", + HbaLipinski: "hba_lipinski", + HBDLipinski: "hbd_lipinski", + NumLipinskiRo5Violations: "num_lipinski_ro5_violations", +} + +var CompoundPropertyTableColumns = struct { + Molregno string + MWFreebase string + Alogp string + Hba string + HBD string + Psa string + RTB string + Ro3Pass string + NumRo5Violations string + CXMostApka string + CXMostBpka string + CXLogp string + CXLogd string + MolecularSpecies string + FullMWT string + AromaticRings string + HeavyAtoms string + QedWeighted string + MWMonoisotopic string + FullMolformula string + HbaLipinski string + HBDLipinski string + NumLipinskiRo5Violations string +}{ + Molregno: "compound_properties.molregno", + MWFreebase: "compound_properties.mw_freebase", + Alogp: "compound_properties.alogp", + Hba: "compound_properties.hba", + HBD: "compound_properties.hbd", + Psa: "compound_properties.psa", + RTB: "compound_properties.rtb", + Ro3Pass: "compound_properties.ro3_pass", + NumRo5Violations: "compound_properties.num_ro5_violations", + CXMostApka: "compound_properties.cx_most_apka", + CXMostBpka: "compound_properties.cx_most_bpka", + CXLogp: "compound_properties.cx_logp", + CXLogd: "compound_properties.cx_logd", + MolecularSpecies: "compound_properties.molecular_species", + FullMWT: "compound_properties.full_mwt", + AromaticRings: "compound_properties.aromatic_rings", + HeavyAtoms: "compound_properties.heavy_atoms", + QedWeighted: 
"compound_properties.qed_weighted", + MWMonoisotopic: "compound_properties.mw_monoisotopic", + FullMolformula: "compound_properties.full_molformula", + HbaLipinski: "compound_properties.hba_lipinski", + HBDLipinski: "compound_properties.hbd_lipinski", + NumLipinskiRo5Violations: "compound_properties.num_lipinski_ro5_violations", +} + +// Generated where + +var CompoundPropertyWhere = struct { + Molregno whereHelperint64 + MWFreebase whereHelpertypes_NullDecimal + Alogp whereHelpertypes_NullDecimal + Hba whereHelpernull_Int64 + HBD whereHelpernull_Int64 + Psa whereHelpertypes_NullDecimal + RTB whereHelpernull_Int64 + Ro3Pass whereHelpernull_String + NumRo5Violations whereHelpernull_Int16 + CXMostApka whereHelpertypes_NullDecimal + CXMostBpka whereHelpertypes_NullDecimal + CXLogp whereHelpertypes_NullDecimal + CXLogd whereHelpertypes_NullDecimal + MolecularSpecies whereHelpernull_String + FullMWT whereHelpertypes_NullDecimal + AromaticRings whereHelpernull_Int64 + HeavyAtoms whereHelpernull_Int64 + QedWeighted whereHelpertypes_NullDecimal + MWMonoisotopic whereHelpertypes_NullDecimal + FullMolformula whereHelpernull_String + HbaLipinski whereHelpernull_Int64 + HBDLipinski whereHelpernull_Int64 + NumLipinskiRo5Violations whereHelpernull_Int16 +}{ + Molregno: whereHelperint64{field: "\"compound_properties\".\"molregno\""}, + MWFreebase: whereHelpertypes_NullDecimal{field: "\"compound_properties\".\"mw_freebase\""}, + Alogp: whereHelpertypes_NullDecimal{field: "\"compound_properties\".\"alogp\""}, + Hba: whereHelpernull_Int64{field: "\"compound_properties\".\"hba\""}, + HBD: whereHelpernull_Int64{field: "\"compound_properties\".\"hbd\""}, + Psa: whereHelpertypes_NullDecimal{field: "\"compound_properties\".\"psa\""}, + RTB: whereHelpernull_Int64{field: "\"compound_properties\".\"rtb\""}, + Ro3Pass: whereHelpernull_String{field: "\"compound_properties\".\"ro3_pass\""}, + NumRo5Violations: whereHelpernull_Int16{field: "\"compound_properties\".\"num_ro5_violations\""}, + 
CXMostApka: whereHelpertypes_NullDecimal{field: "\"compound_properties\".\"cx_most_apka\""}, + CXMostBpka: whereHelpertypes_NullDecimal{field: "\"compound_properties\".\"cx_most_bpka\""}, + CXLogp: whereHelpertypes_NullDecimal{field: "\"compound_properties\".\"cx_logp\""}, + CXLogd: whereHelpertypes_NullDecimal{field: "\"compound_properties\".\"cx_logd\""}, + MolecularSpecies: whereHelpernull_String{field: "\"compound_properties\".\"molecular_species\""}, + FullMWT: whereHelpertypes_NullDecimal{field: "\"compound_properties\".\"full_mwt\""}, + AromaticRings: whereHelpernull_Int64{field: "\"compound_properties\".\"aromatic_rings\""}, + HeavyAtoms: whereHelpernull_Int64{field: "\"compound_properties\".\"heavy_atoms\""}, + QedWeighted: whereHelpertypes_NullDecimal{field: "\"compound_properties\".\"qed_weighted\""}, + MWMonoisotopic: whereHelpertypes_NullDecimal{field: "\"compound_properties\".\"mw_monoisotopic\""}, + FullMolformula: whereHelpernull_String{field: "\"compound_properties\".\"full_molformula\""}, + HbaLipinski: whereHelpernull_Int64{field: "\"compound_properties\".\"hba_lipinski\""}, + HBDLipinski: whereHelpernull_Int64{field: "\"compound_properties\".\"hbd_lipinski\""}, + NumLipinskiRo5Violations: whereHelpernull_Int16{field: "\"compound_properties\".\"num_lipinski_ro5_violations\""}, +} + +// CompoundPropertyRels is where relationship names are stored. +var CompoundPropertyRels = struct { + MolregnoMoleculeDictionary string +}{ + MolregnoMoleculeDictionary: "MolregnoMoleculeDictionary", +} + +// compoundPropertyR is where relationships are stored. 
+type compoundPropertyR struct { + MolregnoMoleculeDictionary *MoleculeDictionary `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"` +} + +// NewStruct creates a new relationship struct +func (*compoundPropertyR) NewStruct() *compoundPropertyR { + return &compoundPropertyR{} +} + +func (r *compoundPropertyR) GetMolregnoMoleculeDictionary() *MoleculeDictionary { + if r == nil { + return nil + } + return r.MolregnoMoleculeDictionary +} + +// compoundPropertyL is where Load methods for each relationship are stored. +type compoundPropertyL struct{} + +var ( + compoundPropertyAllColumns = []string{"molregno", "mw_freebase", "alogp", "hba", "hbd", "psa", "rtb", "ro3_pass", "num_ro5_violations", "cx_most_apka", "cx_most_bpka", "cx_logp", "cx_logd", "molecular_species", "full_mwt", "aromatic_rings", "heavy_atoms", "qed_weighted", "mw_monoisotopic", "full_molformula", "hba_lipinski", "hbd_lipinski", "num_lipinski_ro5_violations"} + compoundPropertyColumnsWithoutDefault = []string{"molregno"} + compoundPropertyColumnsWithDefault = []string{"mw_freebase", "alogp", "hba", "hbd", "psa", "rtb", "ro3_pass", "num_ro5_violations", "cx_most_apka", "cx_most_bpka", "cx_logp", "cx_logd", "molecular_species", "full_mwt", "aromatic_rings", "heavy_atoms", "qed_weighted", "mw_monoisotopic", "full_molformula", "hba_lipinski", "hbd_lipinski", "num_lipinski_ro5_violations"} + compoundPropertyPrimaryKeyColumns = []string{"molregno"} + compoundPropertyGeneratedColumns = []string{} +) + +type ( + // CompoundPropertySlice is an alias for a slice of pointers to CompoundProperty. + // This should almost always be used instead of []CompoundProperty. 
+ CompoundPropertySlice []*CompoundProperty + // CompoundPropertyHook is the signature for custom CompoundProperty hook methods + CompoundPropertyHook func(context.Context, boil.ContextExecutor, *CompoundProperty) error + + compoundPropertyQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + compoundPropertyType = reflect.TypeOf(&CompoundProperty{}) + compoundPropertyMapping = queries.MakeStructMapping(compoundPropertyType) + compoundPropertyPrimaryKeyMapping, _ = queries.BindMapping(compoundPropertyType, compoundPropertyMapping, compoundPropertyPrimaryKeyColumns) + compoundPropertyInsertCacheMut sync.RWMutex + compoundPropertyInsertCache = make(map[string]insertCache) + compoundPropertyUpdateCacheMut sync.RWMutex + compoundPropertyUpdateCache = make(map[string]updateCache) + compoundPropertyUpsertCacheMut sync.RWMutex + compoundPropertyUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var compoundPropertyAfterSelectHooks []CompoundPropertyHook + +var compoundPropertyBeforeInsertHooks []CompoundPropertyHook +var compoundPropertyAfterInsertHooks []CompoundPropertyHook + +var compoundPropertyBeforeUpdateHooks []CompoundPropertyHook +var compoundPropertyAfterUpdateHooks []CompoundPropertyHook + +var compoundPropertyBeforeDeleteHooks []CompoundPropertyHook +var compoundPropertyAfterDeleteHooks []CompoundPropertyHook + +var compoundPropertyBeforeUpsertHooks []CompoundPropertyHook +var compoundPropertyAfterUpsertHooks []CompoundPropertyHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *CompoundProperty) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundPropertyAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *CompoundProperty) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundPropertyBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *CompoundProperty) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundPropertyAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *CompoundProperty) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundPropertyBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *CompoundProperty) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundPropertyAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *CompoundProperty) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundPropertyBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *CompoundProperty) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundPropertyAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *CompoundProperty) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundPropertyBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *CompoundProperty) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundPropertyAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddCompoundPropertyHook registers your hook function for all future operations. 
+func AddCompoundPropertyHook(hookPoint boil.HookPoint, compoundPropertyHook CompoundPropertyHook) { + switch hookPoint { + case boil.AfterSelectHook: + compoundPropertyAfterSelectHooks = append(compoundPropertyAfterSelectHooks, compoundPropertyHook) + case boil.BeforeInsertHook: + compoundPropertyBeforeInsertHooks = append(compoundPropertyBeforeInsertHooks, compoundPropertyHook) + case boil.AfterInsertHook: + compoundPropertyAfterInsertHooks = append(compoundPropertyAfterInsertHooks, compoundPropertyHook) + case boil.BeforeUpdateHook: + compoundPropertyBeforeUpdateHooks = append(compoundPropertyBeforeUpdateHooks, compoundPropertyHook) + case boil.AfterUpdateHook: + compoundPropertyAfterUpdateHooks = append(compoundPropertyAfterUpdateHooks, compoundPropertyHook) + case boil.BeforeDeleteHook: + compoundPropertyBeforeDeleteHooks = append(compoundPropertyBeforeDeleteHooks, compoundPropertyHook) + case boil.AfterDeleteHook: + compoundPropertyAfterDeleteHooks = append(compoundPropertyAfterDeleteHooks, compoundPropertyHook) + case boil.BeforeUpsertHook: + compoundPropertyBeforeUpsertHooks = append(compoundPropertyBeforeUpsertHooks, compoundPropertyHook) + case boil.AfterUpsertHook: + compoundPropertyAfterUpsertHooks = append(compoundPropertyAfterUpsertHooks, compoundPropertyHook) + } +} + +// One returns a single compoundProperty record from the query. +func (q compoundPropertyQuery) One(ctx context.Context, exec boil.ContextExecutor) (*CompoundProperty, error) { + o := &CompoundProperty{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for compound_properties") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all CompoundProperty records from the query. 
+func (q compoundPropertyQuery) All(ctx context.Context, exec boil.ContextExecutor) (CompoundPropertySlice, error) { + var o []*CompoundProperty + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to CompoundProperty slice") + } + + if len(compoundPropertyAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all CompoundProperty records in the query. +func (q compoundPropertyQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count compound_properties rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q compoundPropertyQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if compound_properties exists") + } + + return count > 0, nil +} + +// MolregnoMoleculeDictionary pointed to by the foreign key. +func (o *CompoundProperty) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"molregno\" = ?", o.Molregno), + } + + queryMods = append(queryMods, mods...) + + return MoleculeDictionaries(queryMods...) +} + +// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (compoundPropertyL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundProperty interface{}, mods queries.Applicator) error { + var slice []*CompoundProperty + var object *CompoundProperty + + if singular { + object = maybeCompoundProperty.(*CompoundProperty) + } else { + slice = *maybeCompoundProperty.(*[]*CompoundProperty) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &compoundPropertyR{} + } + args = append(args, object.Molregno) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &compoundPropertyR{} + } + + for _, a := range args { + if a == obj.Molregno { + continue Outer + } + } + + args = append(args, obj.Molregno) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`molecule_dictionary`), + qm.WhereIn(`molecule_dictionary.molregno in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load MoleculeDictionary") + } + + var resultSlice []*MoleculeDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary") + } + + if len(compoundPropertyAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.MolregnoMoleculeDictionary = foreign + if foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + 
foreign.R.MolregnoCompoundProperty = object + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Molregno == foreign.Molregno { + local.R.MolregnoMoleculeDictionary = foreign + if foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.MolregnoCompoundProperty = local + break + } + } + } + + return nil +} + +// SetMolregnoMoleculeDictionary of the compoundProperty to the related item. +// Sets o.R.MolregnoMoleculeDictionary to related. +// Adds o to related.R.MolregnoCompoundProperty. +func (o *CompoundProperty) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"compound_properties\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, compoundPropertyPrimaryKeyColumns), + ) + values := []interface{}{related.Molregno, o.Molregno} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.Molregno = related.Molregno + if o.R == nil { + o.R = &compoundPropertyR{ + MolregnoMoleculeDictionary: related, + } + } else { + o.R.MolregnoMoleculeDictionary = related + } + + if related.R == nil { + related.R = &moleculeDictionaryR{ + MolregnoCompoundProperty: o, + } + } else { + related.R.MolregnoCompoundProperty = o + } + + return nil +} + +// CompoundProperties retrieves all the records using an executor. 
+func CompoundProperties(mods ...qm.QueryMod) compoundPropertyQuery { + mods = append(mods, qm.From("\"compound_properties\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"compound_properties\".*"}) + } + + return compoundPropertyQuery{q} +} + +// FindCompoundProperty retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindCompoundProperty(ctx context.Context, exec boil.ContextExecutor, molregno int64, selectCols ...string) (*CompoundProperty, error) { + compoundPropertyObj := &CompoundProperty{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"compound_properties\" where \"molregno\"=?", sel, + ) + + q := queries.Raw(query, molregno) + + err := q.Bind(ctx, exec, compoundPropertyObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from compound_properties") + } + + if err = compoundPropertyObj.doAfterSelectHooks(ctx, exec); err != nil { + return compoundPropertyObj, err + } + + return compoundPropertyObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *CompoundProperty) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no compound_properties provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(compoundPropertyColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + compoundPropertyInsertCacheMut.RLock() + cache, cached := compoundPropertyInsertCache[key] + compoundPropertyInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + compoundPropertyAllColumns, + compoundPropertyColumnsWithDefault, + compoundPropertyColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(compoundPropertyType, compoundPropertyMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(compoundPropertyType, compoundPropertyMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"compound_properties\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"compound_properties\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) 
+ } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into compound_properties") + } + + if !cached { + compoundPropertyInsertCacheMut.Lock() + compoundPropertyInsertCache[key] = cache + compoundPropertyInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the CompoundProperty. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *CompoundProperty) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + compoundPropertyUpdateCacheMut.RLock() + cache, cached := compoundPropertyUpdateCache[key] + compoundPropertyUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + compoundPropertyAllColumns, + compoundPropertyPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update compound_properties, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"compound_properties\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, compoundPropertyPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(compoundPropertyType, compoundPropertyMapping, append(wl, compoundPropertyPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + 
var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update compound_properties row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for compound_properties") + } + + if !cached { + compoundPropertyUpdateCacheMut.Lock() + compoundPropertyUpdateCache[key] = cache + compoundPropertyUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q compoundPropertyQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for compound_properties") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for compound_properties") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o CompoundPropertySlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundPropertyPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"compound_properties\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundPropertyPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in compoundProperty slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all compoundProperty") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *CompoundProperty) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no compound_properties provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(compoundPropertyColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + 
strmangle.PutBuffer(buf) + + compoundPropertyUpsertCacheMut.RLock() + cache, cached := compoundPropertyUpsertCache[key] + compoundPropertyUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + compoundPropertyAllColumns, + compoundPropertyColumnsWithDefault, + compoundPropertyColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + compoundPropertyAllColumns, + compoundPropertyPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert compound_properties, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(compoundPropertyPrimaryKeyColumns)) + copy(conflict, compoundPropertyPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"compound_properties\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(compoundPropertyType, compoundPropertyMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(compoundPropertyType, compoundPropertyMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert compound_properties") + } + + if !cached { + compoundPropertyUpsertCacheMut.Lock() + compoundPropertyUpsertCache[key] = cache + compoundPropertyUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single CompoundProperty record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *CompoundProperty) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no CompoundProperty provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), compoundPropertyPrimaryKeyMapping) + sql := "DELETE FROM \"compound_properties\" WHERE \"molregno\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from compound_properties") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for compound_properties") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q compoundPropertyQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no compoundPropertyQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from compound_properties") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for compound_properties") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o CompoundPropertySlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(compoundPropertyBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundPropertyPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"compound_properties\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundPropertyPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from compoundProperty slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for compound_properties") + } + + if len(compoundPropertyAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *CompoundProperty) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindCompoundProperty(ctx, exec, o.Molregno) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *CompoundPropertySlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := CompoundPropertySlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundPropertyPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"compound_properties\".* FROM \"compound_properties\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundPropertyPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in CompoundPropertySlice") + } + + *o = slice + + return nil +} + +// CompoundPropertyExists checks if the CompoundProperty row exists. 
+func CompoundPropertyExists(ctx context.Context, exec boil.ContextExecutor, molregno int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"compound_properties\" where \"molregno\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, molregno) + } + row := exec.QueryRowContext(ctx, sql, molregno) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if compound_properties exists") + } + + return exists, nil +} diff --git a/models/compound_records.go b/models/compound_records.go new file mode 100644 index 0000000..ebe9d88 --- /dev/null +++ b/models/compound_records.go @@ -0,0 +1,3182 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// CompoundRecord is an object representing the database table. 
+type CompoundRecord struct { + RecordID int64 `boil:"record_id" json:"record_id" toml:"record_id" yaml:"record_id"` + Molregno null.Int64 `boil:"molregno" json:"molregno,omitempty" toml:"molregno" yaml:"molregno,omitempty"` + DocID int64 `boil:"doc_id" json:"doc_id" toml:"doc_id" yaml:"doc_id"` + CompoundKey null.String `boil:"compound_key" json:"compound_key,omitempty" toml:"compound_key" yaml:"compound_key,omitempty"` + CompoundName null.String `boil:"compound_name" json:"compound_name,omitempty" toml:"compound_name" yaml:"compound_name,omitempty"` + SRCID int64 `boil:"src_id" json:"src_id" toml:"src_id" yaml:"src_id"` + SRCCompoundID null.String `boil:"src_compound_id" json:"src_compound_id,omitempty" toml:"src_compound_id" yaml:"src_compound_id,omitempty"` + Cidx string `boil:"cidx" json:"cidx" toml:"cidx" yaml:"cidx"` + + R *compoundRecordR `boil:"-" json:"-" toml:"-" yaml:"-"` + L compoundRecordL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var CompoundRecordColumns = struct { + RecordID string + Molregno string + DocID string + CompoundKey string + CompoundName string + SRCID string + SRCCompoundID string + Cidx string +}{ + RecordID: "record_id", + Molregno: "molregno", + DocID: "doc_id", + CompoundKey: "compound_key", + CompoundName: "compound_name", + SRCID: "src_id", + SRCCompoundID: "src_compound_id", + Cidx: "cidx", +} + +var CompoundRecordTableColumns = struct { + RecordID string + Molregno string + DocID string + CompoundKey string + CompoundName string + SRCID string + SRCCompoundID string + Cidx string +}{ + RecordID: "compound_records.record_id", + Molregno: "compound_records.molregno", + DocID: "compound_records.doc_id", + CompoundKey: "compound_records.compound_key", + CompoundName: "compound_records.compound_name", + SRCID: "compound_records.src_id", + SRCCompoundID: "compound_records.src_compound_id", + Cidx: "compound_records.cidx", +} + +// Generated where + +var CompoundRecordWhere = struct { + RecordID whereHelperint64 + Molregno 
whereHelpernull_Int64 + DocID whereHelperint64 + CompoundKey whereHelpernull_String + CompoundName whereHelpernull_String + SRCID whereHelperint64 + SRCCompoundID whereHelpernull_String + Cidx whereHelperstring +}{ + RecordID: whereHelperint64{field: "\"compound_records\".\"record_id\""}, + Molregno: whereHelpernull_Int64{field: "\"compound_records\".\"molregno\""}, + DocID: whereHelperint64{field: "\"compound_records\".\"doc_id\""}, + CompoundKey: whereHelpernull_String{field: "\"compound_records\".\"compound_key\""}, + CompoundName: whereHelpernull_String{field: "\"compound_records\".\"compound_name\""}, + SRCID: whereHelperint64{field: "\"compound_records\".\"src_id\""}, + SRCCompoundID: whereHelpernull_String{field: "\"compound_records\".\"src_compound_id\""}, + Cidx: whereHelperstring{field: "\"compound_records\".\"cidx\""}, +} + +// CompoundRecordRels is where relationship names are stored. +var CompoundRecordRels = struct { + SRC string + MolregnoMoleculeDictionary string + Doc string + RecordActivities string + RecordDrugIndications string + RecordDrugMechanisms string + RecordDrugWarnings string + RecordFormulations string + SubstrateRecordMetabolisms string + MetaboliteRecordMetabolisms string + DrugRecordMetabolisms string +}{ + SRC: "SRC", + MolregnoMoleculeDictionary: "MolregnoMoleculeDictionary", + Doc: "Doc", + RecordActivities: "RecordActivities", + RecordDrugIndications: "RecordDrugIndications", + RecordDrugMechanisms: "RecordDrugMechanisms", + RecordDrugWarnings: "RecordDrugWarnings", + RecordFormulations: "RecordFormulations", + SubstrateRecordMetabolisms: "SubstrateRecordMetabolisms", + MetaboliteRecordMetabolisms: "MetaboliteRecordMetabolisms", + DrugRecordMetabolisms: "DrugRecordMetabolisms", +} + +// compoundRecordR is where relationships are stored. 
+type compoundRecordR struct { + SRC *Source `boil:"SRC" json:"SRC" toml:"SRC" yaml:"SRC"` + MolregnoMoleculeDictionary *MoleculeDictionary `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"` + Doc *Doc `boil:"Doc" json:"Doc" toml:"Doc" yaml:"Doc"` + RecordActivities ActivitySlice `boil:"RecordActivities" json:"RecordActivities" toml:"RecordActivities" yaml:"RecordActivities"` + RecordDrugIndications DrugIndicationSlice `boil:"RecordDrugIndications" json:"RecordDrugIndications" toml:"RecordDrugIndications" yaml:"RecordDrugIndications"` + RecordDrugMechanisms DrugMechanismSlice `boil:"RecordDrugMechanisms" json:"RecordDrugMechanisms" toml:"RecordDrugMechanisms" yaml:"RecordDrugMechanisms"` + RecordDrugWarnings DrugWarningSlice `boil:"RecordDrugWarnings" json:"RecordDrugWarnings" toml:"RecordDrugWarnings" yaml:"RecordDrugWarnings"` + RecordFormulations FormulationSlice `boil:"RecordFormulations" json:"RecordFormulations" toml:"RecordFormulations" yaml:"RecordFormulations"` + SubstrateRecordMetabolisms MetabolismSlice `boil:"SubstrateRecordMetabolisms" json:"SubstrateRecordMetabolisms" toml:"SubstrateRecordMetabolisms" yaml:"SubstrateRecordMetabolisms"` + MetaboliteRecordMetabolisms MetabolismSlice `boil:"MetaboliteRecordMetabolisms" json:"MetaboliteRecordMetabolisms" toml:"MetaboliteRecordMetabolisms" yaml:"MetaboliteRecordMetabolisms"` + DrugRecordMetabolisms MetabolismSlice `boil:"DrugRecordMetabolisms" json:"DrugRecordMetabolisms" toml:"DrugRecordMetabolisms" yaml:"DrugRecordMetabolisms"` +} + +// NewStruct creates a new relationship struct +func (*compoundRecordR) NewStruct() *compoundRecordR { + return &compoundRecordR{} +} + +func (r *compoundRecordR) GetSRC() *Source { + if r == nil { + return nil + } + return r.SRC +} + +func (r *compoundRecordR) GetMolregnoMoleculeDictionary() *MoleculeDictionary { + if r == nil { + return nil + } + return r.MolregnoMoleculeDictionary +} + 
// GetDoc returns the eagerly loaded Doc; nil-receiver safe.
func (r *compoundRecordR) GetDoc() *Doc {
	if r == nil {
		return nil
	}
	return r.Doc
}

// GetRecordActivities returns the eagerly loaded activities; nil-receiver safe.
func (r *compoundRecordR) GetRecordActivities() ActivitySlice {
	if r == nil {
		return nil
	}
	return r.RecordActivities
}

// GetRecordDrugIndications returns the eagerly loaded drug indications; nil-receiver safe.
func (r *compoundRecordR) GetRecordDrugIndications() DrugIndicationSlice {
	if r == nil {
		return nil
	}
	return r.RecordDrugIndications
}

// GetRecordDrugMechanisms returns the eagerly loaded drug mechanisms; nil-receiver safe.
func (r *compoundRecordR) GetRecordDrugMechanisms() DrugMechanismSlice {
	if r == nil {
		return nil
	}
	return r.RecordDrugMechanisms
}

// GetRecordDrugWarnings returns the eagerly loaded drug warnings; nil-receiver safe.
func (r *compoundRecordR) GetRecordDrugWarnings() DrugWarningSlice {
	if r == nil {
		return nil
	}
	return r.RecordDrugWarnings
}

// GetRecordFormulations returns the eagerly loaded formulations; nil-receiver safe.
func (r *compoundRecordR) GetRecordFormulations() FormulationSlice {
	if r == nil {
		return nil
	}
	return r.RecordFormulations
}

// GetSubstrateRecordMetabolisms returns metabolisms linked via substrate_record_id; nil-receiver safe.
func (r *compoundRecordR) GetSubstrateRecordMetabolisms() MetabolismSlice {
	if r == nil {
		return nil
	}
	return r.SubstrateRecordMetabolisms
}

// GetMetaboliteRecordMetabolisms returns metabolisms linked via metabolite_record_id; nil-receiver safe.
func (r *compoundRecordR) GetMetaboliteRecordMetabolisms() MetabolismSlice {
	if r == nil {
		return nil
	}
	return r.MetaboliteRecordMetabolisms
}

// GetDrugRecordMetabolisms returns metabolisms linked via drug_record_id; nil-receiver safe.
func (r *compoundRecordR) GetDrugRecordMetabolisms() MetabolismSlice {
	if r == nil {
		return nil
	}
	return r.DrugRecordMetabolisms
}

// compoundRecordL is where Load methods for each relationship are stored.
type compoundRecordL struct{}

// Column lists used by the query builders; membership here (with/without
// default) decides which columns participate in INSERT column lists.
var (
	compoundRecordAllColumns            = []string{"record_id", "molregno", "doc_id", "compound_key", "compound_name", "src_id", "src_compound_id", "cidx"}
	compoundRecordColumnsWithoutDefault = []string{"record_id", "doc_id", "src_id", "cidx"}
	compoundRecordColumnsWithDefault    = []string{"molregno", "compound_key", "compound_name", "src_compound_id"}
	compoundRecordPrimaryKeyColumns     = []string{"record_id"}
	compoundRecordGeneratedColumns      = []string{}
)

type (
	// CompoundRecordSlice is an alias for a slice of pointers to CompoundRecord.
	// This should almost always be used instead of []CompoundRecord.
	CompoundRecordSlice []*CompoundRecord
	// CompoundRecordHook is the signature for custom CompoundRecord hook methods
	CompoundRecordHook func(context.Context, boil.ContextExecutor, *CompoundRecord) error

	// compoundRecordQuery wraps queries.Query to attach model-specific
	// finishers (One, All, Count, Exists, ...).
	compoundRecordQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	compoundRecordType                 = reflect.TypeOf(&CompoundRecord{})
	compoundRecordMapping              = queries.MakeStructMapping(compoundRecordType)
	compoundRecordPrimaryKeyMapping, _ = queries.BindMapping(compoundRecordType, compoundRecordMapping, compoundRecordPrimaryKeyColumns)
	compoundRecordInsertCacheMut       sync.RWMutex
	compoundRecordInsertCache          = make(map[string]insertCache)
	compoundRecordUpdateCacheMut       sync.RWMutex
	compoundRecordUpdateCache          = make(map[string]updateCache)
	compoundRecordUpsertCacheMut       sync.RWMutex
	compoundRecordUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Package-level hook registries, appended to by AddCompoundRecordHook and
// executed by the doXxxHooks methods below. NOTE(review): registration is
// not mutex-guarded; assumed to happen during single-goroutine init.
var compoundRecordAfterSelectHooks []CompoundRecordHook

var compoundRecordBeforeInsertHooks []CompoundRecordHook
var compoundRecordAfterInsertHooks []CompoundRecordHook

var compoundRecordBeforeUpdateHooks []CompoundRecordHook
var compoundRecordAfterUpdateHooks []CompoundRecordHook

var compoundRecordBeforeDeleteHooks []CompoundRecordHook
var compoundRecordAfterDeleteHooks []CompoundRecordHook

var compoundRecordBeforeUpsertHooks []CompoundRecordHook
var compoundRecordAfterUpsertHooks []CompoundRecordHook

// doAfterSelectHooks executes all "after Select" hooks.
+func (o *CompoundRecord) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundRecordAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *CompoundRecord) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundRecordBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *CompoundRecord) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundRecordAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *CompoundRecord) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundRecordBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *CompoundRecord) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundRecordAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *CompoundRecord) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundRecordBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *CompoundRecord) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundRecordAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *CompoundRecord) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundRecordBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *CompoundRecord) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundRecordAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddCompoundRecordHook registers your hook function for all future operations. 
+func AddCompoundRecordHook(hookPoint boil.HookPoint, compoundRecordHook CompoundRecordHook) { + switch hookPoint { + case boil.AfterSelectHook: + compoundRecordAfterSelectHooks = append(compoundRecordAfterSelectHooks, compoundRecordHook) + case boil.BeforeInsertHook: + compoundRecordBeforeInsertHooks = append(compoundRecordBeforeInsertHooks, compoundRecordHook) + case boil.AfterInsertHook: + compoundRecordAfterInsertHooks = append(compoundRecordAfterInsertHooks, compoundRecordHook) + case boil.BeforeUpdateHook: + compoundRecordBeforeUpdateHooks = append(compoundRecordBeforeUpdateHooks, compoundRecordHook) + case boil.AfterUpdateHook: + compoundRecordAfterUpdateHooks = append(compoundRecordAfterUpdateHooks, compoundRecordHook) + case boil.BeforeDeleteHook: + compoundRecordBeforeDeleteHooks = append(compoundRecordBeforeDeleteHooks, compoundRecordHook) + case boil.AfterDeleteHook: + compoundRecordAfterDeleteHooks = append(compoundRecordAfterDeleteHooks, compoundRecordHook) + case boil.BeforeUpsertHook: + compoundRecordBeforeUpsertHooks = append(compoundRecordBeforeUpsertHooks, compoundRecordHook) + case boil.AfterUpsertHook: + compoundRecordAfterUpsertHooks = append(compoundRecordAfterUpsertHooks, compoundRecordHook) + } +} + +// One returns a single compoundRecord record from the query. +func (q compoundRecordQuery) One(ctx context.Context, exec boil.ContextExecutor) (*CompoundRecord, error) { + o := &CompoundRecord{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for compound_records") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all CompoundRecord records from the query. 
// Binds every row of the query into a fresh slice and runs after-select
// hooks on each bound object.
func (q compoundRecordQuery) All(ctx context.Context, exec boil.ContextExecutor) (CompoundRecordSlice, error) {
	var o []*CompoundRecord

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to CompoundRecord slice")
	}

	if len(compoundRecordAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all CompoundRecord records in the query.
func (q compoundRecordQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Replace the select list with COUNT(*) before executing.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count compound_records rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q compoundRecordQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	// COUNT with LIMIT 1: the database can stop at the first match.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if compound_records exists")
	}

	return count > 0, nil
}

// SRC pointed to by the foreign key.
func (o *CompoundRecord) SRC(mods ...qm.QueryMod) sourceQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"src_id\" = ?", o.SRCID),
	}

	queryMods = append(queryMods, mods...)

	return Sources(queryMods...)
}

// MolregnoMoleculeDictionary pointed to by the foreign key.
func (o *CompoundRecord) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"molregno\" = ?", o.Molregno),
	}

	queryMods = append(queryMods, mods...)

	return MoleculeDictionaries(queryMods...)
}

// Doc pointed to by the foreign key.
func (o *CompoundRecord) Doc(mods ...qm.QueryMod) docQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"doc_id\" = ?", o.DocID),
	}

	queryMods = append(queryMods, mods...)

	return Docs(queryMods...)
}

// RecordActivities retrieves all the activity's Activities with an executor via record_id column.
func (o *CompoundRecord) RecordActivities(mods ...qm.QueryMod) activityQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"activities\".\"record_id\"=?", o.RecordID),
	)

	return Activities(queryMods...)
}

// RecordDrugIndications retrieves all the drug_indication's DrugIndications with an executor via record_id column.
func (o *CompoundRecord) RecordDrugIndications(mods ...qm.QueryMod) drugIndicationQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"drug_indication\".\"record_id\"=?", o.RecordID),
	)

	return DrugIndications(queryMods...)
}

// RecordDrugMechanisms retrieves all the drug_mechanism's DrugMechanisms with an executor via record_id column.
func (o *CompoundRecord) RecordDrugMechanisms(mods ...qm.QueryMod) drugMechanismQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"drug_mechanism\".\"record_id\"=?", o.RecordID),
	)

	return DrugMechanisms(queryMods...)
}

// RecordDrugWarnings retrieves all the drug_warning's DrugWarnings with an executor via record_id column.
func (o *CompoundRecord) RecordDrugWarnings(mods ...qm.QueryMod) drugWarningQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"drug_warning\".\"record_id\"=?", o.RecordID),
	)

	return DrugWarnings(queryMods...)
}

// RecordFormulations retrieves all the formulation's Formulations with an executor via record_id column.
func (o *CompoundRecord) RecordFormulations(mods ...qm.QueryMod) formulationQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"formulations\".\"record_id\"=?", o.RecordID),
	)

	return Formulations(queryMods...)
}

// SubstrateRecordMetabolisms retrieves all the metabolism's Metabolisms with an executor via substrate_record_id column.
func (o *CompoundRecord) SubstrateRecordMetabolisms(mods ...qm.QueryMod) metabolismQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"metabolism\".\"substrate_record_id\"=?", o.RecordID),
	)

	return Metabolisms(queryMods...)
}

// MetaboliteRecordMetabolisms retrieves all the metabolism's Metabolisms with an executor via metabolite_record_id column.
func (o *CompoundRecord) MetaboliteRecordMetabolisms(mods ...qm.QueryMod) metabolismQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"metabolism\".\"metabolite_record_id\"=?", o.RecordID),
	)

	return Metabolisms(queryMods...)
}

// DrugRecordMetabolisms retrieves all the metabolism's Metabolisms with an executor via drug_record_id column.
func (o *CompoundRecord) DrugRecordMetabolisms(mods ...qm.QueryMod) metabolismQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"metabolism\".\"drug_record_id\"=?", o.RecordID),
	)

	return Metabolisms(queryMods...)
}

// LoadSRC allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
// Eager-loads the Source for one CompoundRecord (singular) or a slice of
// them, deduplicating foreign-key values before querying, then wires both
// sides of the relationship (R.SRC / foreign R.SRCCompoundRecords).
func (compoundRecordL) LoadSRC(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundRecord interface{}, mods queries.Applicator) error {
	var slice []*CompoundRecord
	var object *CompoundRecord

	if singular {
		object = maybeCompoundRecord.(*CompoundRecord)
	} else {
		slice = *maybeCompoundRecord.(*[]*CompoundRecord)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &compoundRecordR{}
		}
		args = append(args, object.SRCID)

	} else {
		// Collect distinct SRCID values (linear-scan dedupe).
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &compoundRecordR{}
			}

			for _, a := range args {
				if a == obj.SRCID {
					continue Outer
				}
			}

			args = append(args, obj.SRCID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`source`),
		qm.WhereIn(`source.src_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load Source")
	}

	var resultSlice []*Source
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice Source")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for source")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for source")
	}

	// NOTE(review): gates on the compound-record hook registry but runs the
	// loaded Source objects' after-select hooks — matches the generator's
	// template output; confirm against a regenerated file before changing.
	if len(compoundRecordAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.SRC = foreign
		if foreign.R == nil {
			foreign.R = &sourceR{}
		}
		foreign.R.SRCCompoundRecords = append(foreign.R.SRCCompoundRecords, object)
		return nil
	}

	// Match each local row to its foreign row by key and link both sides.
	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.SRCID == foreign.SRCID {
				local.R.SRC = foreign
				if foreign.R == nil {
					foreign.R = &sourceR{}
				}
				foreign.R.SRCCompoundRecords = append(foreign.R.SRCCompoundRecords, local)
				break
			}
		}
	}

	return nil
}

// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
// Molregno is nullable, so NULL keys are skipped and comparisons use
// queries.IsNil / queries.Equal instead of ==.
func (compoundRecordL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundRecord interface{}, mods queries.Applicator) error {
	var slice []*CompoundRecord
	var object *CompoundRecord

	if singular {
		object = maybeCompoundRecord.(*CompoundRecord)
	} else {
		slice = *maybeCompoundRecord.(*[]*CompoundRecord)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &compoundRecordR{}
		}
		if !queries.IsNil(object.Molregno) {
			args = append(args, object.Molregno)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &compoundRecordR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.Molregno) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.Molregno) {
				args = append(args, obj.Molregno)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_dictionary`),
		qm.WhereIn(`molecule_dictionary.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load MoleculeDictionary")
	}

	var resultSlice []*MoleculeDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary")
	}

	if len(compoundRecordAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.MolregnoMoleculeDictionary = foreign
		if foreign.R == nil {
			foreign.R = &moleculeDictionaryR{}
		}
		foreign.R.MolregnoCompoundRecords = append(foreign.R.MolregnoCompoundRecords, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.Molregno, foreign.Molregno) {
				local.R.MolregnoMoleculeDictionary = foreign
				if foreign.R == nil {
					foreign.R = &moleculeDictionaryR{}
				}
				foreign.R.MolregnoCompoundRecords = append(foreign.R.MolregnoCompoundRecords, local)
				break
			}
		}
	}

	return nil
}

// LoadDoc allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
// Eager-loads the Doc for one CompoundRecord or a slice, deduplicating
// DocID values before querying, then links both sides of the relation.
func (compoundRecordL) LoadDoc(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundRecord interface{}, mods queries.Applicator) error {
	var slice []*CompoundRecord
	var object *CompoundRecord

	if singular {
		object = maybeCompoundRecord.(*CompoundRecord)
	} else {
		slice = *maybeCompoundRecord.(*[]*CompoundRecord)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &compoundRecordR{}
		}
		args = append(args, object.DocID)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &compoundRecordR{}
			}

			for _, a := range args {
				if a == obj.DocID {
					continue Outer
				}
			}

			args = append(args, obj.DocID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`docs`),
		qm.WhereIn(`docs.doc_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load Doc")
	}

	var resultSlice []*Doc
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice Doc")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for docs")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for docs")
	}

	// NOTE(review): gates on the compound-record hook registry but runs the
	// loaded Doc objects' hooks — generator template output; verify against
	// a regenerated file before changing.
	if len(compoundRecordAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.Doc = foreign
		if foreign.R == nil {
			foreign.R = &docR{}
		}
		foreign.R.CompoundRecords = append(foreign.R.CompoundRecords, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.DocID == foreign.DocID {
				local.R.Doc = foreign
				if foreign.R == nil {
					foreign.R = &docR{}
				}
				foreign.R.CompoundRecords = append(foreign.R.CompoundRecords, local)
				break
			}
		}
	}

	return nil
}

// LoadRecordActivities allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (compoundRecordL) LoadRecordActivities(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundRecord interface{}, mods queries.Applicator) error {
	var slice []*CompoundRecord
	var object *CompoundRecord

	if singular {
		object = maybeCompoundRecord.(*CompoundRecord)
	} else {
		slice = *maybeCompoundRecord.(*[]*CompoundRecord)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &compoundRecordR{}
		}
		args = append(args, object.RecordID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &compoundRecordR{}
			}

			for _, a := range args {
				if a == obj.RecordID {
					continue Outer
				}
			}

			args = append(args, obj.RecordID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`activities`),
		qm.WhereIn(`activities.record_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load activities")
	}

	var resultSlice []*Activity
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice activities")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on activities")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activities")
	}

	// To-many load: gates on the foreign model's (Activity) hook registry.
	if len(activityAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.RecordActivities = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &activityR{}
			}
			foreign.R.Record = object
		}
		return nil
	}

	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.RecordID == foreign.RecordID {
				local.R.RecordActivities = append(local.R.RecordActivities, foreign)
				if foreign.R == nil {
					foreign.R = &activityR{}
				}
				foreign.R.Record = local
				break
			}
		}
	}

	return nil
}

// LoadRecordDrugIndications allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (compoundRecordL) LoadRecordDrugIndications(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundRecord interface{}, mods queries.Applicator) error {
	var slice []*CompoundRecord
	var object *CompoundRecord

	if singular {
		object = maybeCompoundRecord.(*CompoundRecord)
	} else {
		slice = *maybeCompoundRecord.(*[]*CompoundRecord)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &compoundRecordR{}
		}
		args = append(args, object.RecordID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &compoundRecordR{}
			}

			for _, a := range args {
				if a == obj.RecordID {
					continue Outer
				}
			}

			args = append(args, obj.RecordID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`drug_indication`),
		qm.WhereIn(`drug_indication.record_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load drug_indication")
	}

	var resultSlice []*DrugIndication
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice drug_indication")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on drug_indication")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for drug_indication")
	}

	if len(drugIndicationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.RecordDrugIndications = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &drugIndicationR{}
			}
			foreign.R.Record = object
		}
		return nil
	}

	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.RecordID == foreign.RecordID {
				local.R.RecordDrugIndications = append(local.R.RecordDrugIndications, foreign)
				if foreign.R == nil {
					foreign.R = &drugIndicationR{}
				}
				foreign.R.Record = local
				break
			}
		}
	}

	return nil
}

// LoadRecordDrugMechanisms allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
// Eager-loads drug_mechanism children for one CompoundRecord or a slice,
// deduplicating RecordID keys, then links each child's R.Record back.
func (compoundRecordL) LoadRecordDrugMechanisms(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundRecord interface{}, mods queries.Applicator) error {
	var slice []*CompoundRecord
	var object *CompoundRecord

	if singular {
		object = maybeCompoundRecord.(*CompoundRecord)
	} else {
		slice = *maybeCompoundRecord.(*[]*CompoundRecord)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &compoundRecordR{}
		}
		args = append(args, object.RecordID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &compoundRecordR{}
			}

			for _, a := range args {
				if a == obj.RecordID {
					continue Outer
				}
			}

			args = append(args, obj.RecordID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`drug_mechanism`),
		qm.WhereIn(`drug_mechanism.record_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load drug_mechanism")
	}

	var resultSlice []*DrugMechanism
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice drug_mechanism")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on drug_mechanism")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for drug_mechanism")
	}

	if len(drugMechanismAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.RecordDrugMechanisms = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &drugMechanismR{}
			}
			foreign.R.Record = object
		}
		return nil
	}

	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.RecordID == foreign.RecordID {
				local.R.RecordDrugMechanisms = append(local.R.RecordDrugMechanisms, foreign)
				if foreign.R == nil {
					foreign.R = &drugMechanismR{}
				}
				foreign.R.Record = local
				break
			}
		}
	}

	return nil
}

// LoadRecordDrugWarnings allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
// Comparisons use queries.Equal because drug_warning.record_id is a
// nullable foreign key on the child side.
func (compoundRecordL) LoadRecordDrugWarnings(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundRecord interface{}, mods queries.Applicator) error {
	var slice []*CompoundRecord
	var object *CompoundRecord

	if singular {
		object = maybeCompoundRecord.(*CompoundRecord)
	} else {
		slice = *maybeCompoundRecord.(*[]*CompoundRecord)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &compoundRecordR{}
		}
		args = append(args, object.RecordID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &compoundRecordR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.RecordID) {
					continue Outer
				}
			}

			args = append(args, obj.RecordID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`drug_warning`),
		qm.WhereIn(`drug_warning.record_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load drug_warning")
	}

	var resultSlice []*DrugWarning
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice drug_warning")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on drug_warning")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for drug_warning")
	}

	if len(drugWarningAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.RecordDrugWarnings = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &drugWarningR{}
			}
			foreign.R.Record = object
		}
		return nil
	}

	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.RecordID, foreign.RecordID) {
				local.R.RecordDrugWarnings = append(local.R.RecordDrugWarnings, foreign)
				if foreign.R == nil {
					foreign.R = &drugWarningR{}
				}
				foreign.R.Record = local
				break
			}
		}
	}

	return nil
}

// LoadRecordFormulations allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (compoundRecordL) LoadRecordFormulations(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundRecord interface{}, mods queries.Applicator) error {
	var slice []*CompoundRecord
	var object *CompoundRecord

	if singular {
		object = maybeCompoundRecord.(*CompoundRecord)
	} else {
		slice = *maybeCompoundRecord.(*[]*CompoundRecord)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &compoundRecordR{}
		}
		args = append(args, object.RecordID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &compoundRecordR{}
			}

			for _, a := range args {
				if a == obj.RecordID {
					continue Outer
				}
			}

			args = append(args, obj.RecordID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`formulations`),
		qm.WhereIn(`formulations.record_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load formulations")
	}

	var resultSlice []*Formulation
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice formulations")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on formulations")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for formulations")
	}

	if len(formulationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.RecordFormulations = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &formulationR{}
			}
			foreign.R.Record = object
		}
		return nil
	}

	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.RecordID == foreign.RecordID {
				local.R.RecordFormulations = append(local.R.RecordFormulations, foreign)
				if foreign.R == nil {
					foreign.R = &formulationR{}
				}
				foreign.R.Record = local
				break
			}
		}
	}

	return nil
}

// LoadSubstrateRecordMetabolisms allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
+func (compoundRecordL) LoadSubstrateRecordMetabolisms(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundRecord interface{}, mods queries.Applicator) error { + var slice []*CompoundRecord + var object *CompoundRecord + + if singular { + object = maybeCompoundRecord.(*CompoundRecord) + } else { + slice = *maybeCompoundRecord.(*[]*CompoundRecord) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &compoundRecordR{} + } + args = append(args, object.RecordID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &compoundRecordR{} + } + + for _, a := range args { + if queries.Equal(a, obj.RecordID) { + continue Outer + } + } + + args = append(args, obj.RecordID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`metabolism`), + qm.WhereIn(`metabolism.substrate_record_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load metabolism") + } + + var resultSlice []*Metabolism + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice metabolism") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on metabolism") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for metabolism") + } + + if len(metabolismAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.SubstrateRecordMetabolisms = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &metabolismR{} + } + foreign.R.SubstrateRecord = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice 
{ + if queries.Equal(local.RecordID, foreign.SubstrateRecordID) { + local.R.SubstrateRecordMetabolisms = append(local.R.SubstrateRecordMetabolisms, foreign) + if foreign.R == nil { + foreign.R = &metabolismR{} + } + foreign.R.SubstrateRecord = local + break + } + } + } + + return nil +} + +// LoadMetaboliteRecordMetabolisms allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (compoundRecordL) LoadMetaboliteRecordMetabolisms(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundRecord interface{}, mods queries.Applicator) error { + var slice []*CompoundRecord + var object *CompoundRecord + + if singular { + object = maybeCompoundRecord.(*CompoundRecord) + } else { + slice = *maybeCompoundRecord.(*[]*CompoundRecord) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &compoundRecordR{} + } + args = append(args, object.RecordID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &compoundRecordR{} + } + + for _, a := range args { + if queries.Equal(a, obj.RecordID) { + continue Outer + } + } + + args = append(args, obj.RecordID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`metabolism`), + qm.WhereIn(`metabolism.metabolite_record_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load metabolism") + } + + var resultSlice []*Metabolism + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice metabolism") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on metabolism") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for metabolism") + } + + if 
len(metabolismAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.MetaboliteRecordMetabolisms = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &metabolismR{} + } + foreign.R.MetaboliteRecord = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.RecordID, foreign.MetaboliteRecordID) { + local.R.MetaboliteRecordMetabolisms = append(local.R.MetaboliteRecordMetabolisms, foreign) + if foreign.R == nil { + foreign.R = &metabolismR{} + } + foreign.R.MetaboliteRecord = local + break + } + } + } + + return nil +} + +// LoadDrugRecordMetabolisms allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (compoundRecordL) LoadDrugRecordMetabolisms(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundRecord interface{}, mods queries.Applicator) error { + var slice []*CompoundRecord + var object *CompoundRecord + + if singular { + object = maybeCompoundRecord.(*CompoundRecord) + } else { + slice = *maybeCompoundRecord.(*[]*CompoundRecord) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &compoundRecordR{} + } + args = append(args, object.RecordID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &compoundRecordR{} + } + + for _, a := range args { + if queries.Equal(a, obj.RecordID) { + continue Outer + } + } + + args = append(args, obj.RecordID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`metabolism`), + qm.WhereIn(`metabolism.drug_record_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load metabolism") + } + + var 
resultSlice []*Metabolism + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice metabolism") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on metabolism") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for metabolism") + } + + if len(metabolismAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.DrugRecordMetabolisms = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &metabolismR{} + } + foreign.R.DrugRecord = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.RecordID, foreign.DrugRecordID) { + local.R.DrugRecordMetabolisms = append(local.R.DrugRecordMetabolisms, foreign) + if foreign.R == nil { + foreign.R = &metabolismR{} + } + foreign.R.DrugRecord = local + break + } + } + } + + return nil +} + +// SetSRC of the compoundRecord to the related item. +// Sets o.R.SRC to related. +// Adds o to related.R.SRCCompoundRecords. 
+func (o *CompoundRecord) SetSRC(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Source) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"compound_records\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"src_id"}), + strmangle.WhereClause("\"", "\"", 0, compoundRecordPrimaryKeyColumns), + ) + values := []interface{}{related.SRCID, o.RecordID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.SRCID = related.SRCID + if o.R == nil { + o.R = &compoundRecordR{ + SRC: related, + } + } else { + o.R.SRC = related + } + + if related.R == nil { + related.R = &sourceR{ + SRCCompoundRecords: CompoundRecordSlice{o}, + } + } else { + related.R.SRCCompoundRecords = append(related.R.SRCCompoundRecords, o) + } + + return nil +} + +// SetMolregnoMoleculeDictionary of the compoundRecord to the related item. +// Sets o.R.MolregnoMoleculeDictionary to related. +// Adds o to related.R.MolregnoCompoundRecords. 
+func (o *CompoundRecord) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"compound_records\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, compoundRecordPrimaryKeyColumns), + ) + values := []interface{}{related.Molregno, o.RecordID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.Molregno, related.Molregno) + if o.R == nil { + o.R = &compoundRecordR{ + MolregnoMoleculeDictionary: related, + } + } else { + o.R.MolregnoMoleculeDictionary = related + } + + if related.R == nil { + related.R = &moleculeDictionaryR{ + MolregnoCompoundRecords: CompoundRecordSlice{o}, + } + } else { + related.R.MolregnoCompoundRecords = append(related.R.MolregnoCompoundRecords, o) + } + + return nil +} + +// RemoveMolregnoMoleculeDictionary relationship. +// Sets o.R.MolregnoMoleculeDictionary to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *CompoundRecord) RemoveMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, related *MoleculeDictionary) error { + var err error + + queries.SetScanner(&o.Molregno, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("molregno")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.MolregnoMoleculeDictionary = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.MolregnoCompoundRecords { + if queries.Equal(o.Molregno, ri.Molregno) { + continue + } + + ln := len(related.R.MolregnoCompoundRecords) + if ln > 1 && i < ln-1 { + related.R.MolregnoCompoundRecords[i] = related.R.MolregnoCompoundRecords[ln-1] + } + related.R.MolregnoCompoundRecords = related.R.MolregnoCompoundRecords[:ln-1] + break + } + return nil +} + +// SetDoc of the compoundRecord to the related item. +// Sets o.R.Doc to related. +// Adds o to related.R.CompoundRecords. +func (o *CompoundRecord) SetDoc(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Doc) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"compound_records\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"doc_id"}), + strmangle.WhereClause("\"", "\"", 0, compoundRecordPrimaryKeyColumns), + ) + values := []interface{}{related.DocID, o.RecordID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.DocID = related.DocID + if o.R == nil { + o.R = &compoundRecordR{ + Doc: related, + } + } else { + o.R.Doc = related + } + + if related.R == nil { + related.R = &docR{ + CompoundRecords: 
CompoundRecordSlice{o}, + } + } else { + related.R.CompoundRecords = append(related.R.CompoundRecords, o) + } + + return nil +} + +// AddRecordActivities adds the given related objects to the existing relationships +// of the compound_record, optionally inserting them as new records. +// Appends related to o.R.RecordActivities. +// Sets related.R.Record appropriately. +func (o *CompoundRecord) AddRecordActivities(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Activity) error { + var err error + for _, rel := range related { + if insert { + rel.RecordID = o.RecordID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"activities\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"record_id"}), + strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns), + ) + values := []interface{}{o.RecordID, rel.ActivityID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.RecordID = o.RecordID + } + } + + if o.R == nil { + o.R = &compoundRecordR{ + RecordActivities: related, + } + } else { + o.R.RecordActivities = append(o.R.RecordActivities, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &activityR{ + Record: o, + } + } else { + rel.R.Record = o + } + } + return nil +} + +// AddRecordDrugIndications adds the given related objects to the existing relationships +// of the compound_record, optionally inserting them as new records. +// Appends related to o.R.RecordDrugIndications. +// Sets related.R.Record appropriately. 
+func (o *CompoundRecord) AddRecordDrugIndications(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugIndication) error { + var err error + for _, rel := range related { + if insert { + rel.RecordID = o.RecordID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"drug_indication\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"record_id"}), + strmangle.WhereClause("\"", "\"", 0, drugIndicationPrimaryKeyColumns), + ) + values := []interface{}{o.RecordID, rel.DrugindID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.RecordID = o.RecordID + } + } + + if o.R == nil { + o.R = &compoundRecordR{ + RecordDrugIndications: related, + } + } else { + o.R.RecordDrugIndications = append(o.R.RecordDrugIndications, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &drugIndicationR{ + Record: o, + } + } else { + rel.R.Record = o + } + } + return nil +} + +// AddRecordDrugMechanisms adds the given related objects to the existing relationships +// of the compound_record, optionally inserting them as new records. +// Appends related to o.R.RecordDrugMechanisms. +// Sets related.R.Record appropriately. 
+func (o *CompoundRecord) AddRecordDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugMechanism) error { + var err error + for _, rel := range related { + if insert { + rel.RecordID = o.RecordID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"drug_mechanism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"record_id"}), + strmangle.WhereClause("\"", "\"", 0, drugMechanismPrimaryKeyColumns), + ) + values := []interface{}{o.RecordID, rel.MecID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.RecordID = o.RecordID + } + } + + if o.R == nil { + o.R = &compoundRecordR{ + RecordDrugMechanisms: related, + } + } else { + o.R.RecordDrugMechanisms = append(o.R.RecordDrugMechanisms, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &drugMechanismR{ + Record: o, + } + } else { + rel.R.Record = o + } + } + return nil +} + +// AddRecordDrugWarnings adds the given related objects to the existing relationships +// of the compound_record, optionally inserting them as new records. +// Appends related to o.R.RecordDrugWarnings. +// Sets related.R.Record appropriately. 
+func (o *CompoundRecord) AddRecordDrugWarnings(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugWarning) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.RecordID, o.RecordID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"drug_warning\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"record_id"}), + strmangle.WhereClause("\"", "\"", 0, drugWarningPrimaryKeyColumns), + ) + values := []interface{}{o.RecordID, rel.WarningID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.RecordID, o.RecordID) + } + } + + if o.R == nil { + o.R = &compoundRecordR{ + RecordDrugWarnings: related, + } + } else { + o.R.RecordDrugWarnings = append(o.R.RecordDrugWarnings, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &drugWarningR{ + Record: o, + } + } else { + rel.R.Record = o + } + } + return nil +} + +// SetRecordDrugWarnings removes all previously related items of the +// compound_record replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.Record's RecordDrugWarnings accordingly. +// Replaces o.R.RecordDrugWarnings with related. +// Sets related.R.Record's RecordDrugWarnings accordingly. +func (o *CompoundRecord) SetRecordDrugWarnings(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugWarning) error { + query := "update \"drug_warning\" set \"record_id\" = null where \"record_id\" = ?" 
+ values := []interface{}{o.RecordID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.RecordDrugWarnings { + queries.SetScanner(&rel.RecordID, nil) + if rel.R == nil { + continue + } + + rel.R.Record = nil + } + o.R.RecordDrugWarnings = nil + } + + return o.AddRecordDrugWarnings(ctx, exec, insert, related...) +} + +// RemoveRecordDrugWarnings relationships from objects passed in. +// Removes related items from R.RecordDrugWarnings (uses pointer comparison, removal does not keep order) +// Sets related.R.Record. +func (o *CompoundRecord) RemoveRecordDrugWarnings(ctx context.Context, exec boil.ContextExecutor, related ...*DrugWarning) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.RecordID, nil) + if rel.R != nil { + rel.R.Record = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("record_id")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.RecordDrugWarnings { + if rel != ri { + continue + } + + ln := len(o.R.RecordDrugWarnings) + if ln > 1 && i < ln-1 { + o.R.RecordDrugWarnings[i] = o.R.RecordDrugWarnings[ln-1] + } + o.R.RecordDrugWarnings = o.R.RecordDrugWarnings[:ln-1] + break + } + } + + return nil +} + +// AddRecordFormulations adds the given related objects to the existing relationships +// of the compound_record, optionally inserting them as new records. +// Appends related to o.R.RecordFormulations. +// Sets related.R.Record appropriately. 
+func (o *CompoundRecord) AddRecordFormulations(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Formulation) error { + var err error + for _, rel := range related { + if insert { + rel.RecordID = o.RecordID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"formulations\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"record_id"}), + strmangle.WhereClause("\"", "\"", 0, formulationPrimaryKeyColumns), + ) + values := []interface{}{o.RecordID, rel.FormulationID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.RecordID = o.RecordID + } + } + + if o.R == nil { + o.R = &compoundRecordR{ + RecordFormulations: related, + } + } else { + o.R.RecordFormulations = append(o.R.RecordFormulations, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &formulationR{ + Record: o, + } + } else { + rel.R.Record = o + } + } + return nil +} + +// AddSubstrateRecordMetabolisms adds the given related objects to the existing relationships +// of the compound_record, optionally inserting them as new records. +// Appends related to o.R.SubstrateRecordMetabolisms. +// Sets related.R.SubstrateRecord appropriately. 
+func (o *CompoundRecord) AddSubstrateRecordMetabolisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Metabolism) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.SubstrateRecordID, o.RecordID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"metabolism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"substrate_record_id"}), + strmangle.WhereClause("\"", "\"", 0, metabolismPrimaryKeyColumns), + ) + values := []interface{}{o.RecordID, rel.MetID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.SubstrateRecordID, o.RecordID) + } + } + + if o.R == nil { + o.R = &compoundRecordR{ + SubstrateRecordMetabolisms: related, + } + } else { + o.R.SubstrateRecordMetabolisms = append(o.R.SubstrateRecordMetabolisms, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &metabolismR{ + SubstrateRecord: o, + } + } else { + rel.R.SubstrateRecord = o + } + } + return nil +} + +// SetSubstrateRecordMetabolisms removes all previously related items of the +// compound_record replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.SubstrateRecord's SubstrateRecordMetabolisms accordingly. +// Replaces o.R.SubstrateRecordMetabolisms with related. +// Sets related.R.SubstrateRecord's SubstrateRecordMetabolisms accordingly. 
+func (o *CompoundRecord) SetSubstrateRecordMetabolisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Metabolism) error { + query := "update \"metabolism\" set \"substrate_record_id\" = null where \"substrate_record_id\" = ?" + values := []interface{}{o.RecordID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.SubstrateRecordMetabolisms { + queries.SetScanner(&rel.SubstrateRecordID, nil) + if rel.R == nil { + continue + } + + rel.R.SubstrateRecord = nil + } + o.R.SubstrateRecordMetabolisms = nil + } + + return o.AddSubstrateRecordMetabolisms(ctx, exec, insert, related...) +} + +// RemoveSubstrateRecordMetabolisms relationships from objects passed in. +// Removes related items from R.SubstrateRecordMetabolisms (uses pointer comparison, removal does not keep order) +// Sets related.R.SubstrateRecord. 
+func (o *CompoundRecord) RemoveSubstrateRecordMetabolisms(ctx context.Context, exec boil.ContextExecutor, related ...*Metabolism) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.SubstrateRecordID, nil) + if rel.R != nil { + rel.R.SubstrateRecord = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("substrate_record_id")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.SubstrateRecordMetabolisms { + if rel != ri { + continue + } + + ln := len(o.R.SubstrateRecordMetabolisms) + if ln > 1 && i < ln-1 { + o.R.SubstrateRecordMetabolisms[i] = o.R.SubstrateRecordMetabolisms[ln-1] + } + o.R.SubstrateRecordMetabolisms = o.R.SubstrateRecordMetabolisms[:ln-1] + break + } + } + + return nil +} + +// AddMetaboliteRecordMetabolisms adds the given related objects to the existing relationships +// of the compound_record, optionally inserting them as new records. +// Appends related to o.R.MetaboliteRecordMetabolisms. +// Sets related.R.MetaboliteRecord appropriately. 
+func (o *CompoundRecord) AddMetaboliteRecordMetabolisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Metabolism) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.MetaboliteRecordID, o.RecordID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"metabolism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"metabolite_record_id"}), + strmangle.WhereClause("\"", "\"", 0, metabolismPrimaryKeyColumns), + ) + values := []interface{}{o.RecordID, rel.MetID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.MetaboliteRecordID, o.RecordID) + } + } + + if o.R == nil { + o.R = &compoundRecordR{ + MetaboliteRecordMetabolisms: related, + } + } else { + o.R.MetaboliteRecordMetabolisms = append(o.R.MetaboliteRecordMetabolisms, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &metabolismR{ + MetaboliteRecord: o, + } + } else { + rel.R.MetaboliteRecord = o + } + } + return nil +} + +// SetMetaboliteRecordMetabolisms removes all previously related items of the +// compound_record replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.MetaboliteRecord's MetaboliteRecordMetabolisms accordingly. +// Replaces o.R.MetaboliteRecordMetabolisms with related. +// Sets related.R.MetaboliteRecord's MetaboliteRecordMetabolisms accordingly. 
+func (o *CompoundRecord) SetMetaboliteRecordMetabolisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Metabolism) error { + query := "update \"metabolism\" set \"metabolite_record_id\" = null where \"metabolite_record_id\" = ?" + values := []interface{}{o.RecordID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.MetaboliteRecordMetabolisms { + queries.SetScanner(&rel.MetaboliteRecordID, nil) + if rel.R == nil { + continue + } + + rel.R.MetaboliteRecord = nil + } + o.R.MetaboliteRecordMetabolisms = nil + } + + return o.AddMetaboliteRecordMetabolisms(ctx, exec, insert, related...) +} + +// RemoveMetaboliteRecordMetabolisms relationships from objects passed in. +// Removes related items from R.MetaboliteRecordMetabolisms (uses pointer comparison, removal does not keep order) +// Sets related.R.MetaboliteRecord. 
func (o *CompoundRecord) RemoveMetaboliteRecordMetabolisms(ctx context.Context, exec boil.ContextExecutor, related ...*Metabolism) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		// Null the FK in memory, then persist only that column.
		queries.SetScanner(&rel.MetaboliteRecordID, nil)
		if rel.R != nil {
			rel.R.MetaboliteRecord = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("metabolite_record_id")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	// Drop each removed item from the cached slice by pointer identity,
	// using swap-with-last removal (order is not preserved).
	for _, rel := range related {
		for i, ri := range o.R.MetaboliteRecordMetabolisms {
			if rel != ri {
				continue
			}

			ln := len(o.R.MetaboliteRecordMetabolisms)
			if ln > 1 && i < ln-1 {
				o.R.MetaboliteRecordMetabolisms[i] = o.R.MetaboliteRecordMetabolisms[ln-1]
			}
			o.R.MetaboliteRecordMetabolisms = o.R.MetaboliteRecordMetabolisms[:ln-1]
			break
		}
	}

	return nil
}

// AddDrugRecordMetabolisms adds the given related objects to the existing relationships
// of the compound_record, optionally inserting them as new records.
// Appends related to o.R.DrugRecordMetabolisms.
// Sets related.R.DrugRecord appropriately.
func (o *CompoundRecord) AddDrugRecordMetabolisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Metabolism) error {
	var err error
	for _, rel := range related {
		if insert {
			// New row: assign the FK in memory first, then persist the object.
			queries.Assign(&rel.DrugRecordID, o.RecordID)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: re-point metabolism.drug_record_id at this record
			// via a direct UPDATE keyed on the metabolism primary key.
			updateQuery := fmt.Sprintf(
				"UPDATE \"metabolism\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"drug_record_id"}),
				strmangle.WhereClause("\"", "\"", 0, metabolismPrimaryKeyColumns),
			)
			values := []interface{}{o.RecordID, rel.MetID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			// Mirror the database change on the in-memory struct.
			queries.Assign(&rel.DrugRecordID, o.RecordID)
		}
	}

	// Maintain the forward side of the relationship cache (o.R).
	if o.R == nil {
		o.R = &compoundRecordR{
			DrugRecordMetabolisms: related,
		}
	} else {
		o.R.DrugRecordMetabolisms = append(o.R.DrugRecordMetabolisms, related...)
	}

	// Maintain the reverse side of the relationship cache on each related row.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &metabolismR{
				DrugRecord: o,
			}
		} else {
			rel.R.DrugRecord = o
		}
	}
	return nil
}

// SetDrugRecordMetabolisms removes all previously related items of the
// compound_record replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.DrugRecord's DrugRecordMetabolisms accordingly.
// Replaces o.R.DrugRecordMetabolisms with related.
// Sets related.R.DrugRecord's DrugRecordMetabolisms accordingly.
func (o *CompoundRecord) SetDrugRecordMetabolisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Metabolism) error {
	// Detach every metabolism row currently pointing at this record.
	query := "update \"metabolism\" set \"drug_record_id\" = null where \"drug_record_id\" = ?"
	values := []interface{}{o.RecordID}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	// Keep the in-memory relationship caches consistent with the DB detach.
	if o.R != nil {
		for _, rel := range o.R.DrugRecordMetabolisms {
			queries.SetScanner(&rel.DrugRecordID, nil)
			if rel.R == nil {
				continue
			}

			rel.R.DrugRecord = nil
		}
		o.R.DrugRecordMetabolisms = nil
	}

	// Delegate attachment of the new set to the Add helper.
	return o.AddDrugRecordMetabolisms(ctx, exec, insert, related...)
}

// RemoveDrugRecordMetabolisms relationships from objects passed in.
// Removes related items from R.DrugRecordMetabolisms (uses pointer comparison, removal does not keep order)
// Sets related.R.DrugRecord.
func (o *CompoundRecord) RemoveDrugRecordMetabolisms(ctx context.Context, exec boil.ContextExecutor, related ...*Metabolism) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		// Null the FK in memory, then persist only that column.
		queries.SetScanner(&rel.DrugRecordID, nil)
		if rel.R != nil {
			rel.R.DrugRecord = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("drug_record_id")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	// Drop each removed item from the cached slice by pointer identity,
	// using swap-with-last removal (order is not preserved).
	for _, rel := range related {
		for i, ri := range o.R.DrugRecordMetabolisms {
			if rel != ri {
				continue
			}

			ln := len(o.R.DrugRecordMetabolisms)
			if ln > 1 && i < ln-1 {
				o.R.DrugRecordMetabolisms[i] = o.R.DrugRecordMetabolisms[ln-1]
			}
			o.R.DrugRecordMetabolisms = o.R.DrugRecordMetabolisms[:ln-1]
			break
		}
	}

	return nil
}

// CompoundRecords retrieves all the records using an executor.
func CompoundRecords(mods ...qm.QueryMod) compoundRecordQuery {
	mods = append(mods, qm.From("\"compound_records\""))
	q := NewQuery(mods...)
	// Default to selecting all table columns unless the caller set a SELECT.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"compound_records\".*"})
	}

	return compoundRecordQuery{q}
}

// FindCompoundRecord retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindCompoundRecord(ctx context.Context, exec boil.ContextExecutor, recordID int64, selectCols ...string) (*CompoundRecord, error) {
	compoundRecordObj := &CompoundRecord{}

	sel := "*"
	if len(selectCols) > 0 {
		// Quote requested columns with the dialect's identifier quotes.
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"compound_records\" where \"record_id\"=?", sel,
	)

	q := queries.Raw(query, recordID)

	err := q.Bind(ctx, exec, compoundRecordObj)
	if err != nil {
		// Pass sql.ErrNoRows through unwrapped so callers can detect "not found".
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from compound_records")
	}

	if err = compoundRecordObj.doAfterSelectHooks(ctx, exec); err != nil {
		return compoundRecordObj, err
	}

	return compoundRecordObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *CompoundRecord) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no compound_records provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	// Columns with defaults that hold non-zero values must be inserted
	// explicitly rather than left to the database default.
	nzDefaults := queries.NonZeroDefaultSet(compoundRecordColumnsWithDefault, o)

	// Built INSERT statements are cached per column-set under a RWMutex.
	key := makeCacheKey(columns, nzDefaults)
	compoundRecordInsertCacheMut.RLock()
	cache, cached := compoundRecordInsertCache[key]
	compoundRecordInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			compoundRecordAllColumns,
			compoundRecordColumnsWithDefault,
			compoundRecordColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(compoundRecordType, compoundRecordMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(compoundRecordType, compoundRecordMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"compound_records\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"compound_records\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			// Fetch DB-generated values back via RETURNING.
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	if len(cache.retMapping) != 0 {
		// Scan RETURNING columns straight back into the struct fields.
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into compound_records")
	}

	if !cached {
		compoundRecordInsertCacheMut.Lock()
		compoundRecordInsertCache[key] = cache
		compoundRecordInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the CompoundRecord.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *CompoundRecord) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	// Built UPDATE statements are cached per column-set, like Insert.
	key := makeCacheKey(columns, nil)
	compoundRecordUpdateCacheMut.RLock()
	cache, cached := compoundRecordUpdateCache[key]
	compoundRecordUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			compoundRecordAllColumns,
			compoundRecordPrimaryKeyColumns,
		)

		if !columns.IsWhitelist() {
			// created_at is never touched by inferred updates.
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update compound_records, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"compound_records\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, compoundRecordPrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(compoundRecordType, compoundRecordMapping, append(wl, compoundRecordPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update compound_records row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for compound_records")
	}

	if !cached {
		compoundRecordUpdateCacheMut.Lock()
		compoundRecordUpdateCache[key] = cache
		compoundRecordUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q compoundRecordQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for compound_records")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for compound_records")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o CompoundRecordSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	// SET values come first in the argument list; map iteration order is
	// unspecified but colNames and args are filled in lockstep.
	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundRecordPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"compound_records\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundRecordPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in compoundRecord slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all compoundRecord")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *CompoundRecord) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no compound_records provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(compoundRecordColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	// (every input that changes the generated SQL is folded into the key).
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	compoundRecordUpsertCacheMut.RLock()
	cache, cached := compoundRecordUpsertCache[key]
	compoundRecordUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			compoundRecordAllColumns,
			compoundRecordColumnsWithDefault,
			compoundRecordColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			compoundRecordAllColumns,
			compoundRecordPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert compound_records, could not build update column list")
		}

		// Default conflict target is the primary key.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(compoundRecordPrimaryKeyColumns))
			copy(conflict, compoundRecordPrimaryKeyColumns)
		}
		// SQLite-dialect upsert (INSERT ... ON CONFLICT ...).
		cache.query = buildUpsertQuerySQLite(dialect, "\"compound_records\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(compoundRecordType, compoundRecordMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(compoundRecordType, compoundRecordMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert compound_records")
	}

	if !cached {
		compoundRecordUpsertCacheMut.Lock()
		compoundRecordUpsertCache[key] = cache
		compoundRecordUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single CompoundRecord record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *CompoundRecord) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no CompoundRecord provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), compoundRecordPrimaryKeyMapping)
	sql := "DELETE FROM \"compound_records\" WHERE \"record_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from compound_records")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for compound_records")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q compoundRecordQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no compoundRecordQuery provided for delete all")
	}

	// Reuse the query's WHERE conditions, turning it into a DELETE.
	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from compound_records")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for compound_records")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o CompoundRecordSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Run before-delete hooks only when any are registered (avoids the loop).
	if len(compoundRecordBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// Collect every row's primary key for one bulk DELETE.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundRecordPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"compound_records\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundRecordPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from compoundRecord slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for compound_records")
	}

	if len(compoundRecordAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *CompoundRecord) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindCompoundRecord(ctx, exec, o.RecordID)
	if err != nil {
		return err
	}

	// Overwrite the receiver wholesale with the freshly loaded row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *CompoundRecordSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := CompoundRecordSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundRecordPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"compound_records\".* FROM \"compound_records\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundRecordPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in CompoundRecordSlice")
	}

	*o = slice

	return nil
}

// CompoundRecordExists checks if the CompoundRecord row exists.
func CompoundRecordExists(ctx context.Context, exec boil.ContextExecutor, recordID int64) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"compound_records\" where \"record_id\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, recordID)
	}
	row := exec.QueryRowContext(ctx, sql, recordID)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if compound_records exists")
	}

	return exists, nil
}
diff --git a/models/compound_structural_alerts.go b/models/compound_structural_alerts.go
new file mode 100644
index 0000000..9f478ff
--- /dev/null
+++ b/models/compound_structural_alerts.go
@@ -0,0 +1,1241 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// CompoundStructuralAlert is an object representing the database table.
type CompoundStructuralAlert struct {
	CPDSTRAlertID int64 `boil:"cpd_str_alert_id" json:"cpd_str_alert_id" toml:"cpd_str_alert_id" yaml:"cpd_str_alert_id"`
	Molregno      int64 `boil:"molregno" json:"molregno" toml:"molregno" yaml:"molregno"`
	AlertID       int64 `boil:"alert_id" json:"alert_id" toml:"alert_id" yaml:"alert_id"`

	// R holds eager-loaded relationships; L holds their Load methods.
	R *compoundStructuralAlertR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L compoundStructuralAlertL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// CompoundStructuralAlertColumns maps struct fields to bare column names.
var CompoundStructuralAlertColumns = struct {
	CPDSTRAlertID string
	Molregno      string
	AlertID       string
}{
	CPDSTRAlertID: "cpd_str_alert_id",
	Molregno:      "molregno",
	AlertID:       "alert_id",
}

// CompoundStructuralAlertTableColumns maps struct fields to table-qualified column names.
var CompoundStructuralAlertTableColumns = struct {
	CPDSTRAlertID string
	Molregno      string
	AlertID       string
}{
	CPDSTRAlertID: "compound_structural_alerts.cpd_str_alert_id",
	Molregno:      "compound_structural_alerts.molregno",
	AlertID:       "compound_structural_alerts.alert_id",
}

// Generated where

// CompoundStructuralAlertWhere provides typed where-clause helpers per column.
var CompoundStructuralAlertWhere = struct {
	CPDSTRAlertID whereHelperint64
	Molregno      whereHelperint64
	AlertID       whereHelperint64
}{
	CPDSTRAlertID: whereHelperint64{field: "\"compound_structural_alerts\".\"cpd_str_alert_id\""},
	Molregno:      whereHelperint64{field: "\"compound_structural_alerts\".\"molregno\""},
	AlertID:       whereHelperint64{field: "\"compound_structural_alerts\".\"alert_id\""},
}

// CompoundStructuralAlertRels is where relationship names are stored.
var CompoundStructuralAlertRels = struct {
	MolregnoMoleculeDictionary string
	Alert                      string
}{
	MolregnoMoleculeDictionary: "MolregnoMoleculeDictionary",
	Alert:                      "Alert",
}

// compoundStructuralAlertR is where relationships are stored.
type compoundStructuralAlertR struct {
	MolregnoMoleculeDictionary *MoleculeDictionary `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"`
	Alert                      *StructuralAlert    `boil:"Alert" json:"Alert" toml:"Alert" yaml:"Alert"`
}

// NewStruct creates a new relationship struct
func (*compoundStructuralAlertR) NewStruct() *compoundStructuralAlertR {
	return &compoundStructuralAlertR{}
}

// GetMolregnoMoleculeDictionary returns the cached relation; nil-receiver safe.
func (r *compoundStructuralAlertR) GetMolregnoMoleculeDictionary() *MoleculeDictionary {
	if r == nil {
		return nil
	}
	return r.MolregnoMoleculeDictionary
}

// GetAlert returns the cached relation; nil-receiver safe.
func (r *compoundStructuralAlertR) GetAlert() *StructuralAlert {
	if r == nil {
		return nil
	}
	return r.Alert
}

// compoundStructuralAlertL is where Load methods for each relationship are stored.
type compoundStructuralAlertL struct{}

// Column metadata used by the query builders below.
var (
	compoundStructuralAlertAllColumns            = []string{"cpd_str_alert_id", "molregno", "alert_id"}
	compoundStructuralAlertColumnsWithoutDefault = []string{"cpd_str_alert_id", "molregno", "alert_id"}
	compoundStructuralAlertColumnsWithDefault    = []string{}
	compoundStructuralAlertPrimaryKeyColumns     = []string{"cpd_str_alert_id"}
	compoundStructuralAlertGeneratedColumns      = []string{}
)

type (
	// CompoundStructuralAlertSlice is an alias for a slice of pointers to CompoundStructuralAlert.
	// This should almost always be used instead of []CompoundStructuralAlert.
	CompoundStructuralAlertSlice []*CompoundStructuralAlert
	// CompoundStructuralAlertHook is the signature for custom CompoundStructuralAlert hook methods
	CompoundStructuralAlertHook func(context.Context, boil.ContextExecutor, *CompoundStructuralAlert) error

	compoundStructuralAlertQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	compoundStructuralAlertType                 = reflect.TypeOf(&CompoundStructuralAlert{})
	compoundStructuralAlertMapping              = queries.MakeStructMapping(compoundStructuralAlertType)
	compoundStructuralAlertPrimaryKeyMapping, _ = queries.BindMapping(compoundStructuralAlertType, compoundStructuralAlertMapping, compoundStructuralAlertPrimaryKeyColumns)
	compoundStructuralAlertInsertCacheMut       sync.RWMutex
	compoundStructuralAlertInsertCache          = make(map[string]insertCache)
	compoundStructuralAlertUpdateCacheMut       sync.RWMutex
	compoundStructuralAlertUpdateCache          = make(map[string]updateCache)
	compoundStructuralAlertUpsertCacheMut       sync.RWMutex
	compoundStructuralAlertUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Registered hook slices, one per hook point.
var compoundStructuralAlertAfterSelectHooks []CompoundStructuralAlertHook

var compoundStructuralAlertBeforeInsertHooks []CompoundStructuralAlertHook
var compoundStructuralAlertAfterInsertHooks []CompoundStructuralAlertHook

var compoundStructuralAlertBeforeUpdateHooks []CompoundStructuralAlertHook
var compoundStructuralAlertAfterUpdateHooks []CompoundStructuralAlertHook

var compoundStructuralAlertBeforeDeleteHooks []CompoundStructuralAlertHook
var compoundStructuralAlertAfterDeleteHooks []CompoundStructuralAlertHook

var compoundStructuralAlertBeforeUpsertHooks []CompoundStructuralAlertHook
var compoundStructuralAlertAfterUpsertHooks []CompoundStructuralAlertHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *CompoundStructuralAlert) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range compoundStructuralAlertAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *CompoundStructuralAlert) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range compoundStructuralAlertBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *CompoundStructuralAlert) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be globally disabled per-context.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range compoundStructuralAlertAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *CompoundStructuralAlert) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range compoundStructuralAlertBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *CompoundStructuralAlert) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range compoundStructuralAlertAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *CompoundStructuralAlert) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range compoundStructuralAlertBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *CompoundStructuralAlert) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range compoundStructuralAlertAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *CompoundStructuralAlert) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be globally disabled per-context.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range compoundStructuralAlertBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *CompoundStructuralAlert) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range compoundStructuralAlertAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddCompoundStructuralAlertHook registers your hook function for all future operations.
func AddCompoundStructuralAlertHook(hookPoint boil.HookPoint, compoundStructuralAlertHook CompoundStructuralAlertHook) {
	switch hookPoint {
	case boil.AfterSelectHook:
		compoundStructuralAlertAfterSelectHooks = append(compoundStructuralAlertAfterSelectHooks, compoundStructuralAlertHook)
	case boil.BeforeInsertHook:
		compoundStructuralAlertBeforeInsertHooks = append(compoundStructuralAlertBeforeInsertHooks, compoundStructuralAlertHook)
	case boil.AfterInsertHook:
		compoundStructuralAlertAfterInsertHooks = append(compoundStructuralAlertAfterInsertHooks, compoundStructuralAlertHook)
	case boil.BeforeUpdateHook:
		compoundStructuralAlertBeforeUpdateHooks = append(compoundStructuralAlertBeforeUpdateHooks, compoundStructuralAlertHook)
	case boil.AfterUpdateHook:
		compoundStructuralAlertAfterUpdateHooks = append(compoundStructuralAlertAfterUpdateHooks, compoundStructuralAlertHook)
	case boil.BeforeDeleteHook:
		compoundStructuralAlertBeforeDeleteHooks = append(compoundStructuralAlertBeforeDeleteHooks, compoundStructuralAlertHook)
	case boil.AfterDeleteHook:
		compoundStructuralAlertAfterDeleteHooks = append(compoundStructuralAlertAfterDeleteHooks, compoundStructuralAlertHook)
	case boil.BeforeUpsertHook:
		compoundStructuralAlertBeforeUpsertHooks = append(compoundStructuralAlertBeforeUpsertHooks, compoundStructuralAlertHook)
	case boil.AfterUpsertHook:
		compoundStructuralAlertAfterUpsertHooks = append(compoundStructuralAlertAfterUpsertHooks, compoundStructuralAlertHook)
	}
}

// One returns a single compoundStructuralAlert record from the query.
func (q compoundStructuralAlertQuery) One(ctx context.Context, exec boil.ContextExecutor) (*CompoundStructuralAlert, error) {
	o := &CompoundStructuralAlert{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		// Pass sql.ErrNoRows through unwrapped so callers can detect "not found".
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for compound_structural_alerts")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all CompoundStructuralAlert records from the query.
func (q compoundStructuralAlertQuery) All(ctx context.Context, exec boil.ContextExecutor) (CompoundStructuralAlertSlice, error) {
	var o []*CompoundStructuralAlert

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to CompoundStructuralAlert slice")
	}

	// Only iterate when any after-select hooks are registered.
	if len(compoundStructuralAlertAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all CompoundStructuralAlert records in the query.
+func (q compoundStructuralAlertQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count compound_structural_alerts rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q compoundStructuralAlertQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if compound_structural_alerts exists") + } + + return count > 0, nil +} + +// MolregnoMoleculeDictionary pointed to by the foreign key. +func (o *CompoundStructuralAlert) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"molregno\" = ?", o.Molregno), + } + + queryMods = append(queryMods, mods...) + + return MoleculeDictionaries(queryMods...) +} + +// Alert pointed to by the foreign key. +func (o *CompoundStructuralAlert) Alert(mods ...qm.QueryMod) structuralAlertQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"alert_id\" = ?", o.AlertID), + } + + queryMods = append(queryMods, mods...) + + return StructuralAlerts(queryMods...) +} + +// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (compoundStructuralAlertL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundStructuralAlert interface{}, mods queries.Applicator) error { + var slice []*CompoundStructuralAlert + var object *CompoundStructuralAlert + + if singular { + object = maybeCompoundStructuralAlert.(*CompoundStructuralAlert) + } else { + slice = *maybeCompoundStructuralAlert.(*[]*CompoundStructuralAlert) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &compoundStructuralAlertR{} + } + args = append(args, object.Molregno) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &compoundStructuralAlertR{} + } + + for _, a := range args { + if a == obj.Molregno { + continue Outer + } + } + + args = append(args, obj.Molregno) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`molecule_dictionary`), + qm.WhereIn(`molecule_dictionary.molregno in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load MoleculeDictionary") + } + + var resultSlice []*MoleculeDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary") + } + + if len(compoundStructuralAlertAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.MolregnoMoleculeDictionary = foreign + if 
foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.MolregnoCompoundStructuralAlerts = append(foreign.R.MolregnoCompoundStructuralAlerts, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Molregno == foreign.Molregno { + local.R.MolregnoMoleculeDictionary = foreign + if foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.MolregnoCompoundStructuralAlerts = append(foreign.R.MolregnoCompoundStructuralAlerts, local) + break + } + } + } + + return nil +} + +// LoadAlert allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (compoundStructuralAlertL) LoadAlert(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundStructuralAlert interface{}, mods queries.Applicator) error { + var slice []*CompoundStructuralAlert + var object *CompoundStructuralAlert + + if singular { + object = maybeCompoundStructuralAlert.(*CompoundStructuralAlert) + } else { + slice = *maybeCompoundStructuralAlert.(*[]*CompoundStructuralAlert) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &compoundStructuralAlertR{} + } + args = append(args, object.AlertID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &compoundStructuralAlertR{} + } + + for _, a := range args { + if a == obj.AlertID { + continue Outer + } + } + + args = append(args, obj.AlertID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`structural_alerts`), + qm.WhereIn(`structural_alerts.alert_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load StructuralAlert") + } + + var resultSlice []*StructuralAlert + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager 
loaded slice StructuralAlert") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for structural_alerts") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for structural_alerts") + } + + if len(compoundStructuralAlertAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Alert = foreign + if foreign.R == nil { + foreign.R = &structuralAlertR{} + } + foreign.R.AlertCompoundStructuralAlerts = append(foreign.R.AlertCompoundStructuralAlerts, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.AlertID == foreign.AlertID { + local.R.Alert = foreign + if foreign.R == nil { + foreign.R = &structuralAlertR{} + } + foreign.R.AlertCompoundStructuralAlerts = append(foreign.R.AlertCompoundStructuralAlerts, local) + break + } + } + } + + return nil +} + +// SetMolregnoMoleculeDictionary of the compoundStructuralAlert to the related item. +// Sets o.R.MolregnoMoleculeDictionary to related. +// Adds o to related.R.MolregnoCompoundStructuralAlerts. 
+func (o *CompoundStructuralAlert) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"compound_structural_alerts\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, compoundStructuralAlertPrimaryKeyColumns), + ) + values := []interface{}{related.Molregno, o.CPDSTRAlertID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.Molregno = related.Molregno + if o.R == nil { + o.R = &compoundStructuralAlertR{ + MolregnoMoleculeDictionary: related, + } + } else { + o.R.MolregnoMoleculeDictionary = related + } + + if related.R == nil { + related.R = &moleculeDictionaryR{ + MolregnoCompoundStructuralAlerts: CompoundStructuralAlertSlice{o}, + } + } else { + related.R.MolregnoCompoundStructuralAlerts = append(related.R.MolregnoCompoundStructuralAlerts, o) + } + + return nil +} + +// SetAlert of the compoundStructuralAlert to the related item. +// Sets o.R.Alert to related. +// Adds o to related.R.AlertCompoundStructuralAlerts. 
+func (o *CompoundStructuralAlert) SetAlert(ctx context.Context, exec boil.ContextExecutor, insert bool, related *StructuralAlert) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"compound_structural_alerts\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"alert_id"}), + strmangle.WhereClause("\"", "\"", 0, compoundStructuralAlertPrimaryKeyColumns), + ) + values := []interface{}{related.AlertID, o.CPDSTRAlertID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.AlertID = related.AlertID + if o.R == nil { + o.R = &compoundStructuralAlertR{ + Alert: related, + } + } else { + o.R.Alert = related + } + + if related.R == nil { + related.R = &structuralAlertR{ + AlertCompoundStructuralAlerts: CompoundStructuralAlertSlice{o}, + } + } else { + related.R.AlertCompoundStructuralAlerts = append(related.R.AlertCompoundStructuralAlerts, o) + } + + return nil +} + +// CompoundStructuralAlerts retrieves all the records using an executor. +func CompoundStructuralAlerts(mods ...qm.QueryMod) compoundStructuralAlertQuery { + mods = append(mods, qm.From("\"compound_structural_alerts\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"compound_structural_alerts\".*"}) + } + + return compoundStructuralAlertQuery{q} +} + +// FindCompoundStructuralAlert retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindCompoundStructuralAlert(ctx context.Context, exec boil.ContextExecutor, cPDSTRAlertID int64, selectCols ...string) (*CompoundStructuralAlert, error) { + compoundStructuralAlertObj := &CompoundStructuralAlert{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"compound_structural_alerts\" where \"cpd_str_alert_id\"=?", sel, + ) + + q := queries.Raw(query, cPDSTRAlertID) + + err := q.Bind(ctx, exec, compoundStructuralAlertObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from compound_structural_alerts") + } + + if err = compoundStructuralAlertObj.doAfterSelectHooks(ctx, exec); err != nil { + return compoundStructuralAlertObj, err + } + + return compoundStructuralAlertObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *CompoundStructuralAlert) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no compound_structural_alerts provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(compoundStructuralAlertColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + compoundStructuralAlertInsertCacheMut.RLock() + cache, cached := compoundStructuralAlertInsertCache[key] + compoundStructuralAlertInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + compoundStructuralAlertAllColumns, + compoundStructuralAlertColumnsWithDefault, + compoundStructuralAlertColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(compoundStructuralAlertType, compoundStructuralAlertMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(compoundStructuralAlertType, compoundStructuralAlertMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"compound_structural_alerts\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"compound_structural_alerts\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = 
exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into compound_structural_alerts") + } + + if !cached { + compoundStructuralAlertInsertCacheMut.Lock() + compoundStructuralAlertInsertCache[key] = cache + compoundStructuralAlertInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the CompoundStructuralAlert. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *CompoundStructuralAlert) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + compoundStructuralAlertUpdateCacheMut.RLock() + cache, cached := compoundStructuralAlertUpdateCache[key] + compoundStructuralAlertUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + compoundStructuralAlertAllColumns, + compoundStructuralAlertPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update compound_structural_alerts, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"compound_structural_alerts\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, compoundStructuralAlertPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(compoundStructuralAlertType, compoundStructuralAlertMapping, append(wl, compoundStructuralAlertPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values 
:= queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update compound_structural_alerts row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for compound_structural_alerts") + } + + if !cached { + compoundStructuralAlertUpdateCacheMut.Lock() + compoundStructuralAlertUpdateCache[key] = cache + compoundStructuralAlertUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q compoundStructuralAlertQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for compound_structural_alerts") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for compound_structural_alerts") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. 
+func (o CompoundStructuralAlertSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundStructuralAlertPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"compound_structural_alerts\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundStructuralAlertPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in compoundStructuralAlert slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all compoundStructuralAlert") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. 
+func (o *CompoundStructuralAlert) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no compound_structural_alerts provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(compoundStructuralAlertColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + compoundStructuralAlertUpsertCacheMut.RLock() + cache, cached := compoundStructuralAlertUpsertCache[key] + compoundStructuralAlertUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + compoundStructuralAlertAllColumns, + compoundStructuralAlertColumnsWithDefault, + compoundStructuralAlertColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + compoundStructuralAlertAllColumns, + compoundStructuralAlertPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert compound_structural_alerts, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(compoundStructuralAlertPrimaryKeyColumns)) + copy(conflict, compoundStructuralAlertPrimaryKeyColumns) + } 
+ cache.query = buildUpsertQuerySQLite(dialect, "\"compound_structural_alerts\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(compoundStructuralAlertType, compoundStructuralAlertMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(compoundStructuralAlertType, compoundStructuralAlertMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert compound_structural_alerts") + } + + if !cached { + compoundStructuralAlertUpsertCacheMut.Lock() + compoundStructuralAlertUpsertCache[key] = cache + compoundStructuralAlertUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single CompoundStructuralAlert record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *CompoundStructuralAlert) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no CompoundStructuralAlert provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), compoundStructuralAlertPrimaryKeyMapping) + sql := "DELETE FROM \"compound_structural_alerts\" WHERE \"cpd_str_alert_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from compound_structural_alerts") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for compound_structural_alerts") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q compoundStructuralAlertQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no compoundStructuralAlertQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from compound_structural_alerts") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for compound_structural_alerts") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o CompoundStructuralAlertSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(compoundStructuralAlertBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundStructuralAlertPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"compound_structural_alerts\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundStructuralAlertPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from compoundStructuralAlert slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for compound_structural_alerts") + } + + if len(compoundStructuralAlertAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *CompoundStructuralAlert) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindCompoundStructuralAlert(ctx, exec, o.CPDSTRAlertID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *CompoundStructuralAlertSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := CompoundStructuralAlertSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundStructuralAlertPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"compound_structural_alerts\".* FROM \"compound_structural_alerts\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundStructuralAlertPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in CompoundStructuralAlertSlice") + } + + *o = slice + + return nil +} + +// CompoundStructuralAlertExists checks if the CompoundStructuralAlert row exists. +func CompoundStructuralAlertExists(ctx context.Context, exec boil.ContextExecutor, cPDSTRAlertID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"compound_structural_alerts\" where \"cpd_str_alert_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, cPDSTRAlertID) + } + row := exec.QueryRowContext(ctx, sql, cPDSTRAlertID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if compound_structural_alerts exists") + } + + return exists, nil +} diff --git a/models/compound_structures.go b/models/compound_structures.go new file mode 100644 index 0000000..3a466ee --- /dev/null +++ b/models/compound_structures.go @@ -0,0 +1,1084 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// CompoundStructure is an object representing the database table. +type CompoundStructure struct { + Molregno int64 `boil:"molregno" json:"molregno" toml:"molregno" yaml:"molregno"` + Molfile null.String `boil:"molfile" json:"molfile,omitempty" toml:"molfile" yaml:"molfile,omitempty"` + StandardInchi null.String `boil:"standard_inchi" json:"standard_inchi,omitempty" toml:"standard_inchi" yaml:"standard_inchi,omitempty"` + StandardInchiKey string `boil:"standard_inchi_key" json:"standard_inchi_key" toml:"standard_inchi_key" yaml:"standard_inchi_key"` + CanonicalSmiles null.String `boil:"canonical_smiles" json:"canonical_smiles,omitempty" toml:"canonical_smiles" yaml:"canonical_smiles,omitempty"` + + R *compoundStructureR `boil:"-" json:"-" toml:"-" yaml:"-"` + L compoundStructureL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var CompoundStructureColumns = struct { + Molregno string + Molfile string + StandardInchi string + StandardInchiKey string + CanonicalSmiles string +}{ + Molregno: "molregno", + Molfile: "molfile", + StandardInchi: "standard_inchi", + StandardInchiKey: "standard_inchi_key", + CanonicalSmiles: "canonical_smiles", +} + +var CompoundStructureTableColumns = struct { + Molregno string + Molfile string + StandardInchi string + StandardInchiKey string + CanonicalSmiles string +}{ + Molregno: "compound_structures.molregno", + Molfile: "compound_structures.molfile", + StandardInchi: "compound_structures.standard_inchi", + StandardInchiKey: "compound_structures.standard_inchi_key", + CanonicalSmiles: 
"compound_structures.canonical_smiles", +} + +// Generated where + +var CompoundStructureWhere = struct { + Molregno whereHelperint64 + Molfile whereHelpernull_String + StandardInchi whereHelpernull_String + StandardInchiKey whereHelperstring + CanonicalSmiles whereHelpernull_String +}{ + Molregno: whereHelperint64{field: "\"compound_structures\".\"molregno\""}, + Molfile: whereHelpernull_String{field: "\"compound_structures\".\"molfile\""}, + StandardInchi: whereHelpernull_String{field: "\"compound_structures\".\"standard_inchi\""}, + StandardInchiKey: whereHelperstring{field: "\"compound_structures\".\"standard_inchi_key\""}, + CanonicalSmiles: whereHelpernull_String{field: "\"compound_structures\".\"canonical_smiles\""}, +} + +// CompoundStructureRels is where relationship names are stored. +var CompoundStructureRels = struct { + MolregnoMoleculeDictionary string +}{ + MolregnoMoleculeDictionary: "MolregnoMoleculeDictionary", +} + +// compoundStructureR is where relationships are stored. +type compoundStructureR struct { + MolregnoMoleculeDictionary *MoleculeDictionary `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"` +} + +// NewStruct creates a new relationship struct +func (*compoundStructureR) NewStruct() *compoundStructureR { + return &compoundStructureR{} +} + +func (r *compoundStructureR) GetMolregnoMoleculeDictionary() *MoleculeDictionary { + if r == nil { + return nil + } + return r.MolregnoMoleculeDictionary +} + +// compoundStructureL is where Load methods for each relationship are stored. 
+type compoundStructureL struct{} + +var ( + compoundStructureAllColumns = []string{"molregno", "molfile", "standard_inchi", "standard_inchi_key", "canonical_smiles"} + compoundStructureColumnsWithoutDefault = []string{"molregno", "standard_inchi_key"} + compoundStructureColumnsWithDefault = []string{"molfile", "standard_inchi", "canonical_smiles"} + compoundStructurePrimaryKeyColumns = []string{"molregno"} + compoundStructureGeneratedColumns = []string{} +) + +type ( + // CompoundStructureSlice is an alias for a slice of pointers to CompoundStructure. + // This should almost always be used instead of []CompoundStructure. + CompoundStructureSlice []*CompoundStructure + // CompoundStructureHook is the signature for custom CompoundStructure hook methods + CompoundStructureHook func(context.Context, boil.ContextExecutor, *CompoundStructure) error + + compoundStructureQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + compoundStructureType = reflect.TypeOf(&CompoundStructure{}) + compoundStructureMapping = queries.MakeStructMapping(compoundStructureType) + compoundStructurePrimaryKeyMapping, _ = queries.BindMapping(compoundStructureType, compoundStructureMapping, compoundStructurePrimaryKeyColumns) + compoundStructureInsertCacheMut sync.RWMutex + compoundStructureInsertCache = make(map[string]insertCache) + compoundStructureUpdateCacheMut sync.RWMutex + compoundStructureUpdateCache = make(map[string]updateCache) + compoundStructureUpsertCacheMut sync.RWMutex + compoundStructureUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var compoundStructureAfterSelectHooks []CompoundStructureHook + +var compoundStructureBeforeInsertHooks []CompoundStructureHook +var compoundStructureAfterInsertHooks []CompoundStructureHook + +var compoundStructureBeforeUpdateHooks []CompoundStructureHook +var compoundStructureAfterUpdateHooks []CompoundStructureHook + +var compoundStructureBeforeDeleteHooks []CompoundStructureHook +var compoundStructureAfterDeleteHooks []CompoundStructureHook + +var compoundStructureBeforeUpsertHooks []CompoundStructureHook +var compoundStructureAfterUpsertHooks []CompoundStructureHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *CompoundStructure) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundStructureAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *CompoundStructure) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundStructureBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *CompoundStructure) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundStructureAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *CompoundStructure) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundStructureBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *CompoundStructure) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundStructureAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *CompoundStructure) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundStructureBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *CompoundStructure) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundStructureAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *CompoundStructure) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundStructureBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *CompoundStructure) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range compoundStructureAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddCompoundStructureHook registers your hook function for all future operations. +func AddCompoundStructureHook(hookPoint boil.HookPoint, compoundStructureHook CompoundStructureHook) { + switch hookPoint { + case boil.AfterSelectHook: + compoundStructureAfterSelectHooks = append(compoundStructureAfterSelectHooks, compoundStructureHook) + case boil.BeforeInsertHook: + compoundStructureBeforeInsertHooks = append(compoundStructureBeforeInsertHooks, compoundStructureHook) + case boil.AfterInsertHook: + compoundStructureAfterInsertHooks = append(compoundStructureAfterInsertHooks, compoundStructureHook) + case boil.BeforeUpdateHook: + compoundStructureBeforeUpdateHooks = append(compoundStructureBeforeUpdateHooks, compoundStructureHook) + case boil.AfterUpdateHook: + compoundStructureAfterUpdateHooks = append(compoundStructureAfterUpdateHooks, compoundStructureHook) + case boil.BeforeDeleteHook: + compoundStructureBeforeDeleteHooks = append(compoundStructureBeforeDeleteHooks, compoundStructureHook) + case boil.AfterDeleteHook: + compoundStructureAfterDeleteHooks = append(compoundStructureAfterDeleteHooks, compoundStructureHook) + case boil.BeforeUpsertHook: + compoundStructureBeforeUpsertHooks = append(compoundStructureBeforeUpsertHooks, compoundStructureHook) + case boil.AfterUpsertHook: + compoundStructureAfterUpsertHooks = append(compoundStructureAfterUpsertHooks, compoundStructureHook) + } +} + +// One returns a single compoundStructure record from the query. 
+func (q compoundStructureQuery) One(ctx context.Context, exec boil.ContextExecutor) (*CompoundStructure, error) { + o := &CompoundStructure{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for compound_structures") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all CompoundStructure records from the query. +func (q compoundStructureQuery) All(ctx context.Context, exec boil.ContextExecutor) (CompoundStructureSlice, error) { + var o []*CompoundStructure + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to CompoundStructure slice") + } + + if len(compoundStructureAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all CompoundStructure records in the query. +func (q compoundStructureQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count compound_structures rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. 
+func (q compoundStructureQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if compound_structures exists") + } + + return count > 0, nil +} + +// MolregnoMoleculeDictionary pointed to by the foreign key. +func (o *CompoundStructure) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"molregno\" = ?", o.Molregno), + } + + queryMods = append(queryMods, mods...) + + return MoleculeDictionaries(queryMods...) +} + +// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (compoundStructureL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCompoundStructure interface{}, mods queries.Applicator) error { + var slice []*CompoundStructure + var object *CompoundStructure + + if singular { + object = maybeCompoundStructure.(*CompoundStructure) + } else { + slice = *maybeCompoundStructure.(*[]*CompoundStructure) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &compoundStructureR{} + } + args = append(args, object.Molregno) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &compoundStructureR{} + } + + for _, a := range args { + if a == obj.Molregno { + continue Outer + } + } + + args = append(args, obj.Molregno) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`molecule_dictionary`), + qm.WhereIn(`molecule_dictionary.molregno in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, 
"failed to eager load MoleculeDictionary") + } + + var resultSlice []*MoleculeDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary") + } + + if len(compoundStructureAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.MolregnoMoleculeDictionary = foreign + if foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.MolregnoCompoundStructure = object + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Molregno == foreign.Molregno { + local.R.MolregnoMoleculeDictionary = foreign + if foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.MolregnoCompoundStructure = local + break + } + } + } + + return nil +} + +// SetMolregnoMoleculeDictionary of the compoundStructure to the related item. +// Sets o.R.MolregnoMoleculeDictionary to related. +// Adds o to related.R.MolregnoCompoundStructure. 
+func (o *CompoundStructure) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"compound_structures\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, compoundStructurePrimaryKeyColumns), + ) + values := []interface{}{related.Molregno, o.Molregno} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.Molregno = related.Molregno + if o.R == nil { + o.R = &compoundStructureR{ + MolregnoMoleculeDictionary: related, + } + } else { + o.R.MolregnoMoleculeDictionary = related + } + + if related.R == nil { + related.R = &moleculeDictionaryR{ + MolregnoCompoundStructure: o, + } + } else { + related.R.MolregnoCompoundStructure = o + } + + return nil +} + +// CompoundStructures retrieves all the records using an executor. +func CompoundStructures(mods ...qm.QueryMod) compoundStructureQuery { + mods = append(mods, qm.From("\"compound_structures\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"compound_structures\".*"}) + } + + return compoundStructureQuery{q} +} + +// FindCompoundStructure retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindCompoundStructure(ctx context.Context, exec boil.ContextExecutor, molregno int64, selectCols ...string) (*CompoundStructure, error) { + compoundStructureObj := &CompoundStructure{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"compound_structures\" where \"molregno\"=?", sel, + ) + + q := queries.Raw(query, molregno) + + err := q.Bind(ctx, exec, compoundStructureObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from compound_structures") + } + + if err = compoundStructureObj.doAfterSelectHooks(ctx, exec); err != nil { + return compoundStructureObj, err + } + + return compoundStructureObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *CompoundStructure) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no compound_structures provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(compoundStructureColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + compoundStructureInsertCacheMut.RLock() + cache, cached := compoundStructureInsertCache[key] + compoundStructureInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + compoundStructureAllColumns, + compoundStructureColumnsWithDefault, + compoundStructureColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(compoundStructureType, compoundStructureMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(compoundStructureType, compoundStructureMapping, 
returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"compound_structures\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"compound_structures\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into compound_structures") + } + + if !cached { + compoundStructureInsertCacheMut.Lock() + compoundStructureInsertCache[key] = cache + compoundStructureInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the CompoundStructure. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *CompoundStructure) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + compoundStructureUpdateCacheMut.RLock() + cache, cached := compoundStructureUpdateCache[key] + compoundStructureUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + compoundStructureAllColumns, + compoundStructurePrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update compound_structures, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"compound_structures\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, compoundStructurePrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(compoundStructureType, compoundStructureMapping, append(wl, compoundStructurePrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update compound_structures row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for compound_structures") + } + + if !cached { + compoundStructureUpdateCacheMut.Lock() + compoundStructureUpdateCache[key] = cache + compoundStructureUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q compoundStructureQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for compound_structures") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for compound_structures") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o CompoundStructureSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundStructurePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"compound_structures\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundStructurePrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in compoundStructure slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all compoundStructure") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *CompoundStructure) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no compound_structures provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(compoundStructureColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + compoundStructureUpsertCacheMut.RLock() + cache, cached := compoundStructureUpsertCache[key] + compoundStructureUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + compoundStructureAllColumns, + compoundStructureColumnsWithDefault, + compoundStructureColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + 
compoundStructureAllColumns, + compoundStructurePrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert compound_structures, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(compoundStructurePrimaryKeyColumns)) + copy(conflict, compoundStructurePrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"compound_structures\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(compoundStructureType, compoundStructureMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(compoundStructureType, compoundStructureMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert compound_structures") + } + + if !cached { + compoundStructureUpsertCacheMut.Lock() + compoundStructureUpsertCache[key] = cache + compoundStructureUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single CompoundStructure record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *CompoundStructure) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no CompoundStructure provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), compoundStructurePrimaryKeyMapping) + sql := "DELETE FROM \"compound_structures\" WHERE \"molregno\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from compound_structures") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for compound_structures") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q compoundStructureQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no compoundStructureQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from compound_structures") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for compound_structures") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o CompoundStructureSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(compoundStructureBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundStructurePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"compound_structures\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundStructurePrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from compoundStructure slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for compound_structures") + } + + if len(compoundStructureAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *CompoundStructure) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindCompoundStructure(ctx, exec, o.Molregno) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *CompoundStructureSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := CompoundStructureSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), compoundStructurePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"compound_structures\".* FROM \"compound_structures\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, compoundStructurePrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in CompoundStructureSlice") + } + + *o = slice + + return nil +} + +// CompoundStructureExists checks if the CompoundStructure row exists. +func CompoundStructureExists(ctx context.Context, exec boil.ContextExecutor, molregno int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"compound_structures\" where \"molregno\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, molregno) + } + row := exec.QueryRowContext(ctx, sql, molregno) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if compound_structures exists") + } + + return exists, nil +} diff --git a/models/confidence_score_lookup.go b/models/confidence_score_lookup.go new file mode 100644 index 0000000..988ed80 --- /dev/null +++ b/models/confidence_score_lookup.go @@ -0,0 +1,1146 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// ConfidenceScoreLookup is an object representing the database table. +type ConfidenceScoreLookup struct { + ConfidenceScore int16 `boil:"confidence_score" json:"confidence_score" toml:"confidence_score" yaml:"confidence_score"` + Description string `boil:"description" json:"description" toml:"description" yaml:"description"` + TargetMapping string `boil:"target_mapping" json:"target_mapping" toml:"target_mapping" yaml:"target_mapping"` + + R *confidenceScoreLookupR `boil:"-" json:"-" toml:"-" yaml:"-"` + L confidenceScoreLookupL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ConfidenceScoreLookupColumns = struct { + ConfidenceScore string + Description string + TargetMapping string +}{ + ConfidenceScore: "confidence_score", + Description: "description", + TargetMapping: "target_mapping", +} + +var ConfidenceScoreLookupTableColumns = struct { + ConfidenceScore string + Description string + TargetMapping string +}{ + ConfidenceScore: "confidence_score_lookup.confidence_score", + Description: "confidence_score_lookup.description", + TargetMapping: "confidence_score_lookup.target_mapping", +} + +// Generated where + +var ConfidenceScoreLookupWhere = struct { + ConfidenceScore whereHelperint16 + Description whereHelperstring + TargetMapping whereHelperstring +}{ + ConfidenceScore: whereHelperint16{field: "\"confidence_score_lookup\".\"confidence_score\""}, + Description: whereHelperstring{field: "\"confidence_score_lookup\".\"description\""}, + TargetMapping: whereHelperstring{field: "\"confidence_score_lookup\".\"target_mapping\""}, +} + +// ConfidenceScoreLookupRels 
is where relationship names are stored. +var ConfidenceScoreLookupRels = struct { + ConfidenceScoreAssays string +}{ + ConfidenceScoreAssays: "ConfidenceScoreAssays", +} + +// confidenceScoreLookupR is where relationships are stored. +type confidenceScoreLookupR struct { + ConfidenceScoreAssays AssaySlice `boil:"ConfidenceScoreAssays" json:"ConfidenceScoreAssays" toml:"ConfidenceScoreAssays" yaml:"ConfidenceScoreAssays"` +} + +// NewStruct creates a new relationship struct +func (*confidenceScoreLookupR) NewStruct() *confidenceScoreLookupR { + return &confidenceScoreLookupR{} +} + +func (r *confidenceScoreLookupR) GetConfidenceScoreAssays() AssaySlice { + if r == nil { + return nil + } + return r.ConfidenceScoreAssays +} + +// confidenceScoreLookupL is where Load methods for each relationship are stored. +type confidenceScoreLookupL struct{} + +var ( + confidenceScoreLookupAllColumns = []string{"confidence_score", "description", "target_mapping"} + confidenceScoreLookupColumnsWithoutDefault = []string{"confidence_score", "description", "target_mapping"} + confidenceScoreLookupColumnsWithDefault = []string{} + confidenceScoreLookupPrimaryKeyColumns = []string{"confidence_score"} + confidenceScoreLookupGeneratedColumns = []string{} +) + +type ( + // ConfidenceScoreLookupSlice is an alias for a slice of pointers to ConfidenceScoreLookup. + // This should almost always be used instead of []ConfidenceScoreLookup. 
+ ConfidenceScoreLookupSlice []*ConfidenceScoreLookup + // ConfidenceScoreLookupHook is the signature for custom ConfidenceScoreLookup hook methods + ConfidenceScoreLookupHook func(context.Context, boil.ContextExecutor, *ConfidenceScoreLookup) error + + confidenceScoreLookupQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + confidenceScoreLookupType = reflect.TypeOf(&ConfidenceScoreLookup{}) + confidenceScoreLookupMapping = queries.MakeStructMapping(confidenceScoreLookupType) + confidenceScoreLookupPrimaryKeyMapping, _ = queries.BindMapping(confidenceScoreLookupType, confidenceScoreLookupMapping, confidenceScoreLookupPrimaryKeyColumns) + confidenceScoreLookupInsertCacheMut sync.RWMutex + confidenceScoreLookupInsertCache = make(map[string]insertCache) + confidenceScoreLookupUpdateCacheMut sync.RWMutex + confidenceScoreLookupUpdateCache = make(map[string]updateCache) + confidenceScoreLookupUpsertCacheMut sync.RWMutex + confidenceScoreLookupUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var confidenceScoreLookupAfterSelectHooks []ConfidenceScoreLookupHook + +var confidenceScoreLookupBeforeInsertHooks []ConfidenceScoreLookupHook +var confidenceScoreLookupAfterInsertHooks []ConfidenceScoreLookupHook + +var confidenceScoreLookupBeforeUpdateHooks []ConfidenceScoreLookupHook +var confidenceScoreLookupAfterUpdateHooks []ConfidenceScoreLookupHook + +var confidenceScoreLookupBeforeDeleteHooks []ConfidenceScoreLookupHook +var confidenceScoreLookupAfterDeleteHooks []ConfidenceScoreLookupHook + +var confidenceScoreLookupBeforeUpsertHooks []ConfidenceScoreLookupHook +var confidenceScoreLookupAfterUpsertHooks []ConfidenceScoreLookupHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *ConfidenceScoreLookup) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range confidenceScoreLookupAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *ConfidenceScoreLookup) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range confidenceScoreLookupBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *ConfidenceScoreLookup) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range confidenceScoreLookupAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *ConfidenceScoreLookup) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range confidenceScoreLookupBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ConfidenceScoreLookup) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range confidenceScoreLookupAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *ConfidenceScoreLookup) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range confidenceScoreLookupBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *ConfidenceScoreLookup) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range confidenceScoreLookupAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ConfidenceScoreLookup) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range confidenceScoreLookupBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *ConfidenceScoreLookup) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range confidenceScoreLookupAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddConfidenceScoreLookupHook registers your hook function for all future operations. 
+func AddConfidenceScoreLookupHook(hookPoint boil.HookPoint, confidenceScoreLookupHook ConfidenceScoreLookupHook) { + switch hookPoint { + case boil.AfterSelectHook: + confidenceScoreLookupAfterSelectHooks = append(confidenceScoreLookupAfterSelectHooks, confidenceScoreLookupHook) + case boil.BeforeInsertHook: + confidenceScoreLookupBeforeInsertHooks = append(confidenceScoreLookupBeforeInsertHooks, confidenceScoreLookupHook) + case boil.AfterInsertHook: + confidenceScoreLookupAfterInsertHooks = append(confidenceScoreLookupAfterInsertHooks, confidenceScoreLookupHook) + case boil.BeforeUpdateHook: + confidenceScoreLookupBeforeUpdateHooks = append(confidenceScoreLookupBeforeUpdateHooks, confidenceScoreLookupHook) + case boil.AfterUpdateHook: + confidenceScoreLookupAfterUpdateHooks = append(confidenceScoreLookupAfterUpdateHooks, confidenceScoreLookupHook) + case boil.BeforeDeleteHook: + confidenceScoreLookupBeforeDeleteHooks = append(confidenceScoreLookupBeforeDeleteHooks, confidenceScoreLookupHook) + case boil.AfterDeleteHook: + confidenceScoreLookupAfterDeleteHooks = append(confidenceScoreLookupAfterDeleteHooks, confidenceScoreLookupHook) + case boil.BeforeUpsertHook: + confidenceScoreLookupBeforeUpsertHooks = append(confidenceScoreLookupBeforeUpsertHooks, confidenceScoreLookupHook) + case boil.AfterUpsertHook: + confidenceScoreLookupAfterUpsertHooks = append(confidenceScoreLookupAfterUpsertHooks, confidenceScoreLookupHook) + } +} + +// One returns a single confidenceScoreLookup record from the query. 
+func (q confidenceScoreLookupQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ConfidenceScoreLookup, error) { + o := &ConfidenceScoreLookup{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for confidence_score_lookup") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ConfidenceScoreLookup records from the query. +func (q confidenceScoreLookupQuery) All(ctx context.Context, exec boil.ContextExecutor) (ConfidenceScoreLookupSlice, error) { + var o []*ConfidenceScoreLookup + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ConfidenceScoreLookup slice") + } + + if len(confidenceScoreLookupAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ConfidenceScoreLookup records in the query. +func (q confidenceScoreLookupQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count confidence_score_lookup rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. 
+func (q confidenceScoreLookupQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if confidence_score_lookup exists") + } + + return count > 0, nil +} + +// ConfidenceScoreAssays retrieves all the assay's Assays with an executor via confidence_score column. +func (o *ConfidenceScoreLookup) ConfidenceScoreAssays(mods ...qm.QueryMod) assayQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"assays\".\"confidence_score\"=?", o.ConfidenceScore), + ) + + return Assays(queryMods...) +} + +// LoadConfidenceScoreAssays allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (confidenceScoreLookupL) LoadConfidenceScoreAssays(ctx context.Context, e boil.ContextExecutor, singular bool, maybeConfidenceScoreLookup interface{}, mods queries.Applicator) error { + var slice []*ConfidenceScoreLookup + var object *ConfidenceScoreLookup + + if singular { + object = maybeConfidenceScoreLookup.(*ConfidenceScoreLookup) + } else { + slice = *maybeConfidenceScoreLookup.(*[]*ConfidenceScoreLookup) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &confidenceScoreLookupR{} + } + args = append(args, object.ConfidenceScore) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &confidenceScoreLookupR{} + } + + for _, a := range args { + if queries.Equal(a, obj.ConfidenceScore) { + continue Outer + } + } + + args = append(args, obj.ConfidenceScore) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assays`), + qm.WhereIn(`assays.confidence_score in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load assays") + } + + var resultSlice []*Assay + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice assays") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on assays") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays") + } + + if len(assayAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.ConfidenceScoreAssays = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.ConfidenceScoreConfidenceScoreLookup = object + } + return nil + } + + for _, 
foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.ConfidenceScore, foreign.ConfidenceScore) { + local.R.ConfidenceScoreAssays = append(local.R.ConfidenceScoreAssays, foreign) + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.ConfidenceScoreConfidenceScoreLookup = local + break + } + } + } + + return nil +} + +// AddConfidenceScoreAssays adds the given related objects to the existing relationships +// of the confidence_score_lookup, optionally inserting them as new records. +// Appends related to o.R.ConfidenceScoreAssays. +// Sets related.R.ConfidenceScoreConfidenceScoreLookup appropriately. +func (o *ConfidenceScoreLookup) AddConfidenceScoreAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.ConfidenceScore, o.ConfidenceScore) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"confidence_score"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{o.ConfidenceScore, rel.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.ConfidenceScore, o.ConfidenceScore) + } + } + + if o.R == nil { + o.R = &confidenceScoreLookupR{ + ConfidenceScoreAssays: related, + } + } else { + o.R.ConfidenceScoreAssays = append(o.R.ConfidenceScoreAssays, related...) 
+ } + + for _, rel := range related { + if rel.R == nil { + rel.R = &assayR{ + ConfidenceScoreConfidenceScoreLookup: o, + } + } else { + rel.R.ConfidenceScoreConfidenceScoreLookup = o + } + } + return nil +} + +// SetConfidenceScoreAssays removes all previously related items of the +// confidence_score_lookup replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.ConfidenceScoreConfidenceScoreLookup's ConfidenceScoreAssays accordingly. +// Replaces o.R.ConfidenceScoreAssays with related. +// Sets related.R.ConfidenceScoreConfidenceScoreLookup's ConfidenceScoreAssays accordingly. +func (o *ConfidenceScoreLookup) SetConfidenceScoreAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error { + query := "update \"assays\" set \"confidence_score\" = null where \"confidence_score\" = ?" + values := []interface{}{o.ConfidenceScore} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.ConfidenceScoreAssays { + queries.SetScanner(&rel.ConfidenceScore, nil) + if rel.R == nil { + continue + } + + rel.R.ConfidenceScoreConfidenceScoreLookup = nil + } + o.R.ConfidenceScoreAssays = nil + } + + return o.AddConfidenceScoreAssays(ctx, exec, insert, related...) +} + +// RemoveConfidenceScoreAssays relationships from objects passed in. +// Removes related items from R.ConfidenceScoreAssays (uses pointer comparison, removal does not keep order) +// Sets related.R.ConfidenceScoreConfidenceScoreLookup. 
+func (o *ConfidenceScoreLookup) RemoveConfidenceScoreAssays(ctx context.Context, exec boil.ContextExecutor, related ...*Assay) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.ConfidenceScore, nil) + if rel.R != nil { + rel.R.ConfidenceScoreConfidenceScoreLookup = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("confidence_score")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.ConfidenceScoreAssays { + if rel != ri { + continue + } + + ln := len(o.R.ConfidenceScoreAssays) + if ln > 1 && i < ln-1 { + o.R.ConfidenceScoreAssays[i] = o.R.ConfidenceScoreAssays[ln-1] + } + o.R.ConfidenceScoreAssays = o.R.ConfidenceScoreAssays[:ln-1] + break + } + } + + return nil +} + +// ConfidenceScoreLookups retrieves all the records using an executor. +func ConfidenceScoreLookups(mods ...qm.QueryMod) confidenceScoreLookupQuery { + mods = append(mods, qm.From("\"confidence_score_lookup\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"confidence_score_lookup\".*"}) + } + + return confidenceScoreLookupQuery{q} +} + +// FindConfidenceScoreLookup retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindConfidenceScoreLookup(ctx context.Context, exec boil.ContextExecutor, confidenceScore int16, selectCols ...string) (*ConfidenceScoreLookup, error) { + confidenceScoreLookupObj := &ConfidenceScoreLookup{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"confidence_score_lookup\" where \"confidence_score\"=?", sel, + ) + + q := queries.Raw(query, confidenceScore) + + err := q.Bind(ctx, exec, confidenceScoreLookupObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from confidence_score_lookup") + } + + if err = confidenceScoreLookupObj.doAfterSelectHooks(ctx, exec); err != nil { + return confidenceScoreLookupObj, err + } + + return confidenceScoreLookupObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *ConfidenceScoreLookup) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no confidence_score_lookup provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(confidenceScoreLookupColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + confidenceScoreLookupInsertCacheMut.RLock() + cache, cached := confidenceScoreLookupInsertCache[key] + confidenceScoreLookupInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + confidenceScoreLookupAllColumns, + confidenceScoreLookupColumnsWithDefault, + confidenceScoreLookupColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(confidenceScoreLookupType, confidenceScoreLookupMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(confidenceScoreLookupType, confidenceScoreLookupMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"confidence_score_lookup\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"confidence_score_lookup\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, 
vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into confidence_score_lookup") + } + + if !cached { + confidenceScoreLookupInsertCacheMut.Lock() + confidenceScoreLookupInsertCache[key] = cache + confidenceScoreLookupInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ConfidenceScoreLookup. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *ConfidenceScoreLookup) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + confidenceScoreLookupUpdateCacheMut.RLock() + cache, cached := confidenceScoreLookupUpdateCache[key] + confidenceScoreLookupUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + confidenceScoreLookupAllColumns, + confidenceScoreLookupPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update confidence_score_lookup, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"confidence_score_lookup\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, confidenceScoreLookupPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(confidenceScoreLookupType, confidenceScoreLookupMapping, append(wl, confidenceScoreLookupPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), 
cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update confidence_score_lookup row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for confidence_score_lookup") + } + + if !cached { + confidenceScoreLookupUpdateCacheMut.Lock() + confidenceScoreLookupUpdateCache[key] = cache + confidenceScoreLookupUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q confidenceScoreLookupQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for confidence_score_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for confidence_score_lookup") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. 
+func (o ConfidenceScoreLookupSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), confidenceScoreLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"confidence_score_lookup\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, confidenceScoreLookupPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in confidenceScoreLookup slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all confidenceScoreLookup") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. 
+func (o *ConfidenceScoreLookup) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no confidence_score_lookup provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(confidenceScoreLookupColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + confidenceScoreLookupUpsertCacheMut.RLock() + cache, cached := confidenceScoreLookupUpsertCache[key] + confidenceScoreLookupUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + confidenceScoreLookupAllColumns, + confidenceScoreLookupColumnsWithDefault, + confidenceScoreLookupColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + confidenceScoreLookupAllColumns, + confidenceScoreLookupPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert confidence_score_lookup, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(confidenceScoreLookupPrimaryKeyColumns)) + copy(conflict, confidenceScoreLookupPrimaryKeyColumns) + } + cache.query = 
buildUpsertQuerySQLite(dialect, "\"confidence_score_lookup\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(confidenceScoreLookupType, confidenceScoreLookupMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(confidenceScoreLookupType, confidenceScoreLookupMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert confidence_score_lookup") + } + + if !cached { + confidenceScoreLookupUpsertCacheMut.Lock() + confidenceScoreLookupUpsertCache[key] = cache + confidenceScoreLookupUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ConfidenceScoreLookup record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *ConfidenceScoreLookup) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ConfidenceScoreLookup provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), confidenceScoreLookupPrimaryKeyMapping) + sql := "DELETE FROM \"confidence_score_lookup\" WHERE \"confidence_score\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from confidence_score_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for confidence_score_lookup") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q confidenceScoreLookupQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no confidenceScoreLookupQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from confidence_score_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for confidence_score_lookup") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o ConfidenceScoreLookupSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(confidenceScoreLookupBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), confidenceScoreLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"confidence_score_lookup\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, confidenceScoreLookupPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from confidenceScoreLookup slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for confidence_score_lookup") + } + + if len(confidenceScoreLookupAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ConfidenceScoreLookup) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindConfidenceScoreLookup(ctx, exec, o.ConfidenceScore) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *ConfidenceScoreLookupSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ConfidenceScoreLookupSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), confidenceScoreLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"confidence_score_lookup\".* FROM \"confidence_score_lookup\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, confidenceScoreLookupPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ConfidenceScoreLookupSlice") + } + + *o = slice + + return nil +} + +// ConfidenceScoreLookupExists checks if the ConfidenceScoreLookup row exists. +func ConfidenceScoreLookupExists(ctx context.Context, exec boil.ContextExecutor, confidenceScore int16) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"confidence_score_lookup\" where \"confidence_score\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, confidenceScore) + } + row := exec.QueryRowContext(ctx, sql, confidenceScore) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if confidence_score_lookup exists") + } + + return exists, nil +} diff --git a/models/curation_lookup.go b/models/curation_lookup.go new file mode 100644 index 0000000..7e2b7a6 --- /dev/null +++ b/models/curation_lookup.go @@ -0,0 +1,1139 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// CurationLookup is an object representing the database table. +type CurationLookup struct { + CuratedBy string `boil:"curated_by" json:"curated_by" toml:"curated_by" yaml:"curated_by"` + Description string `boil:"description" json:"description" toml:"description" yaml:"description"` + + R *curationLookupR `boil:"-" json:"-" toml:"-" yaml:"-"` + L curationLookupL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var CurationLookupColumns = struct { + CuratedBy string + Description string +}{ + CuratedBy: "curated_by", + Description: "description", +} + +var CurationLookupTableColumns = struct { + CuratedBy string + Description string +}{ + CuratedBy: "curation_lookup.curated_by", + Description: "curation_lookup.description", +} + +// Generated where + +var CurationLookupWhere = struct { + CuratedBy whereHelperstring + Description whereHelperstring +}{ + CuratedBy: whereHelperstring{field: "\"curation_lookup\".\"curated_by\""}, + Description: whereHelperstring{field: "\"curation_lookup\".\"description\""}, +} + +// CurationLookupRels is where relationship names are stored. +var CurationLookupRels = struct { + CuratedByAssays string +}{ + CuratedByAssays: "CuratedByAssays", +} + +// curationLookupR is where relationships are stored. 
+type curationLookupR struct { + CuratedByAssays AssaySlice `boil:"CuratedByAssays" json:"CuratedByAssays" toml:"CuratedByAssays" yaml:"CuratedByAssays"` +} + +// NewStruct creates a new relationship struct +func (*curationLookupR) NewStruct() *curationLookupR { + return &curationLookupR{} +} + +func (r *curationLookupR) GetCuratedByAssays() AssaySlice { + if r == nil { + return nil + } + return r.CuratedByAssays +} + +// curationLookupL is where Load methods for each relationship are stored. +type curationLookupL struct{} + +var ( + curationLookupAllColumns = []string{"curated_by", "description"} + curationLookupColumnsWithoutDefault = []string{"curated_by", "description"} + curationLookupColumnsWithDefault = []string{} + curationLookupPrimaryKeyColumns = []string{"curated_by"} + curationLookupGeneratedColumns = []string{} +) + +type ( + // CurationLookupSlice is an alias for a slice of pointers to CurationLookup. + // This should almost always be used instead of []CurationLookup. + CurationLookupSlice []*CurationLookup + // CurationLookupHook is the signature for custom CurationLookup hook methods + CurationLookupHook func(context.Context, boil.ContextExecutor, *CurationLookup) error + + curationLookupQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + curationLookupType = reflect.TypeOf(&CurationLookup{}) + curationLookupMapping = queries.MakeStructMapping(curationLookupType) + curationLookupPrimaryKeyMapping, _ = queries.BindMapping(curationLookupType, curationLookupMapping, curationLookupPrimaryKeyColumns) + curationLookupInsertCacheMut sync.RWMutex + curationLookupInsertCache = make(map[string]insertCache) + curationLookupUpdateCacheMut sync.RWMutex + curationLookupUpdateCache = make(map[string]updateCache) + curationLookupUpsertCacheMut sync.RWMutex + curationLookupUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var curationLookupAfterSelectHooks []CurationLookupHook + +var curationLookupBeforeInsertHooks []CurationLookupHook +var curationLookupAfterInsertHooks []CurationLookupHook + +var curationLookupBeforeUpdateHooks []CurationLookupHook +var curationLookupAfterUpdateHooks []CurationLookupHook + +var curationLookupBeforeDeleteHooks []CurationLookupHook +var curationLookupAfterDeleteHooks []CurationLookupHook + +var curationLookupBeforeUpsertHooks []CurationLookupHook +var curationLookupAfterUpsertHooks []CurationLookupHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *CurationLookup) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range curationLookupAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *CurationLookup) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range curationLookupBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *CurationLookup) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range curationLookupAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *CurationLookup) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range curationLookupBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *CurationLookup) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range curationLookupAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *CurationLookup) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range curationLookupBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *CurationLookup) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range curationLookupAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *CurationLookup) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range curationLookupBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *CurationLookup) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range curationLookupAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddCurationLookupHook registers your hook function for all future operations. +func AddCurationLookupHook(hookPoint boil.HookPoint, curationLookupHook CurationLookupHook) { + switch hookPoint { + case boil.AfterSelectHook: + curationLookupAfterSelectHooks = append(curationLookupAfterSelectHooks, curationLookupHook) + case boil.BeforeInsertHook: + curationLookupBeforeInsertHooks = append(curationLookupBeforeInsertHooks, curationLookupHook) + case boil.AfterInsertHook: + curationLookupAfterInsertHooks = append(curationLookupAfterInsertHooks, curationLookupHook) + case boil.BeforeUpdateHook: + curationLookupBeforeUpdateHooks = append(curationLookupBeforeUpdateHooks, curationLookupHook) + case boil.AfterUpdateHook: + curationLookupAfterUpdateHooks = append(curationLookupAfterUpdateHooks, curationLookupHook) + case boil.BeforeDeleteHook: + curationLookupBeforeDeleteHooks = append(curationLookupBeforeDeleteHooks, curationLookupHook) + case boil.AfterDeleteHook: + curationLookupAfterDeleteHooks = append(curationLookupAfterDeleteHooks, curationLookupHook) + case boil.BeforeUpsertHook: + curationLookupBeforeUpsertHooks = append(curationLookupBeforeUpsertHooks, curationLookupHook) + case boil.AfterUpsertHook: + curationLookupAfterUpsertHooks = append(curationLookupAfterUpsertHooks, curationLookupHook) + } +} + +// One returns a single curationLookup record from the query. 
+func (q curationLookupQuery) One(ctx context.Context, exec boil.ContextExecutor) (*CurationLookup, error) { + o := &CurationLookup{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for curation_lookup") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all CurationLookup records from the query. +func (q curationLookupQuery) All(ctx context.Context, exec boil.ContextExecutor) (CurationLookupSlice, error) { + var o []*CurationLookup + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to CurationLookup slice") + } + + if len(curationLookupAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all CurationLookup records in the query. +func (q curationLookupQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count curation_lookup rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. 
+func (q curationLookupQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if curation_lookup exists") + } + + return count > 0, nil +} + +// CuratedByAssays retrieves all the assay's Assays with an executor via curated_by column. +func (o *CurationLookup) CuratedByAssays(mods ...qm.QueryMod) assayQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"assays\".\"curated_by\"=?", o.CuratedBy), + ) + + return Assays(queryMods...) +} + +// LoadCuratedByAssays allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (curationLookupL) LoadCuratedByAssays(ctx context.Context, e boil.ContextExecutor, singular bool, maybeCurationLookup interface{}, mods queries.Applicator) error { + var slice []*CurationLookup + var object *CurationLookup + + if singular { + object = maybeCurationLookup.(*CurationLookup) + } else { + slice = *maybeCurationLookup.(*[]*CurationLookup) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &curationLookupR{} + } + args = append(args, object.CuratedBy) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &curationLookupR{} + } + + for _, a := range args { + if queries.Equal(a, obj.CuratedBy) { + continue Outer + } + } + + args = append(args, obj.CuratedBy) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assays`), + qm.WhereIn(`assays.curated_by in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to 
eager load assays") + } + + var resultSlice []*Assay + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice assays") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on assays") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays") + } + + if len(assayAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.CuratedByAssays = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.CuratedByCurationLookup = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.CuratedBy, foreign.CuratedBy) { + local.R.CuratedByAssays = append(local.R.CuratedByAssays, foreign) + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.CuratedByCurationLookup = local + break + } + } + } + + return nil +} + +// AddCuratedByAssays adds the given related objects to the existing relationships +// of the curation_lookup, optionally inserting them as new records. +// Appends related to o.R.CuratedByAssays. +// Sets related.R.CuratedByCurationLookup appropriately. 
+func (o *CurationLookup) AddCuratedByAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.CuratedBy, o.CuratedBy) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"curated_by"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{o.CuratedBy, rel.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.CuratedBy, o.CuratedBy) + } + } + + if o.R == nil { + o.R = &curationLookupR{ + CuratedByAssays: related, + } + } else { + o.R.CuratedByAssays = append(o.R.CuratedByAssays, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &assayR{ + CuratedByCurationLookup: o, + } + } else { + rel.R.CuratedByCurationLookup = o + } + } + return nil +} + +// SetCuratedByAssays removes all previously related items of the +// curation_lookup replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.CuratedByCurationLookup's CuratedByAssays accordingly. +// Replaces o.R.CuratedByAssays with related. +// Sets related.R.CuratedByCurationLookup's CuratedByAssays accordingly. +func (o *CurationLookup) SetCuratedByAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error { + query := "update \"assays\" set \"curated_by\" = null where \"curated_by\" = ?" 
+ values := []interface{}{o.CuratedBy} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.CuratedByAssays { + queries.SetScanner(&rel.CuratedBy, nil) + if rel.R == nil { + continue + } + + rel.R.CuratedByCurationLookup = nil + } + o.R.CuratedByAssays = nil + } + + return o.AddCuratedByAssays(ctx, exec, insert, related...) +} + +// RemoveCuratedByAssays relationships from objects passed in. +// Removes related items from R.CuratedByAssays (uses pointer comparison, removal does not keep order) +// Sets related.R.CuratedByCurationLookup. +func (o *CurationLookup) RemoveCuratedByAssays(ctx context.Context, exec boil.ContextExecutor, related ...*Assay) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.CuratedBy, nil) + if rel.R != nil { + rel.R.CuratedByCurationLookup = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("curated_by")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.CuratedByAssays { + if rel != ri { + continue + } + + ln := len(o.R.CuratedByAssays) + if ln > 1 && i < ln-1 { + o.R.CuratedByAssays[i] = o.R.CuratedByAssays[ln-1] + } + o.R.CuratedByAssays = o.R.CuratedByAssays[:ln-1] + break + } + } + + return nil +} + +// CurationLookups retrieves all the records using an executor. +func CurationLookups(mods ...qm.QueryMod) curationLookupQuery { + mods = append(mods, qm.From("\"curation_lookup\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"curation_lookup\".*"}) + } + + return curationLookupQuery{q} +} + +// FindCurationLookup retrieves a single record by ID with an executor. 
+// If selectCols is empty Find will return all columns. +func FindCurationLookup(ctx context.Context, exec boil.ContextExecutor, curatedBy string, selectCols ...string) (*CurationLookup, error) { + curationLookupObj := &CurationLookup{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"curation_lookup\" where \"curated_by\"=?", sel, + ) + + q := queries.Raw(query, curatedBy) + + err := q.Bind(ctx, exec, curationLookupObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from curation_lookup") + } + + if err = curationLookupObj.doAfterSelectHooks(ctx, exec); err != nil { + return curationLookupObj, err + } + + return curationLookupObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *CurationLookup) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no curation_lookup provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(curationLookupColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + curationLookupInsertCacheMut.RLock() + cache, cached := curationLookupInsertCache[key] + curationLookupInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + curationLookupAllColumns, + curationLookupColumnsWithDefault, + curationLookupColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(curationLookupType, curationLookupMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(curationLookupType, curationLookupMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"curation_lookup\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"curation_lookup\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) 
+ } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into curation_lookup") + } + + if !cached { + curationLookupInsertCacheMut.Lock() + curationLookupInsertCache[key] = cache + curationLookupInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the CurationLookup. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *CurationLookup) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + curationLookupUpdateCacheMut.RLock() + cache, cached := curationLookupUpdateCache[key] + curationLookupUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + curationLookupAllColumns, + curationLookupPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update curation_lookup, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"curation_lookup\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, curationLookupPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(curationLookupType, curationLookupMapping, append(wl, curationLookupPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = 
exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update curation_lookup row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for curation_lookup") + } + + if !cached { + curationLookupUpdateCacheMut.Lock() + curationLookupUpdateCache[key] = cache + curationLookupUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q curationLookupQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for curation_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for curation_lookup") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o CurationLookupSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), curationLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"curation_lookup\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, curationLookupPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in curationLookup slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all curationLookup") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *CurationLookup) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no curation_lookup provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(curationLookupColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + 
strmangle.PutBuffer(buf) + + curationLookupUpsertCacheMut.RLock() + cache, cached := curationLookupUpsertCache[key] + curationLookupUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + curationLookupAllColumns, + curationLookupColumnsWithDefault, + curationLookupColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + curationLookupAllColumns, + curationLookupPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert curation_lookup, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(curationLookupPrimaryKeyColumns)) + copy(conflict, curationLookupPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"curation_lookup\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(curationLookupType, curationLookupMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(curationLookupType, curationLookupMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert curation_lookup") + } + + if !cached { + curationLookupUpsertCacheMut.Lock() + curationLookupUpsertCache[key] = cache + curationLookupUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single CurationLookup record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *CurationLookup) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no CurationLookup provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), curationLookupPrimaryKeyMapping) + sql := "DELETE FROM \"curation_lookup\" WHERE \"curated_by\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from curation_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for curation_lookup") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q curationLookupQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no curationLookupQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from curation_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for curation_lookup") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o CurationLookupSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(curationLookupBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), curationLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"curation_lookup\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, curationLookupPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from curationLookup slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for curation_lookup") + } + + if len(curationLookupAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *CurationLookup) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindCurationLookup(ctx, exec, o.CuratedBy) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *CurationLookupSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := CurationLookupSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), curationLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"curation_lookup\".* FROM \"curation_lookup\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, curationLookupPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in CurationLookupSlice") + } + + *o = slice + + return nil +} + +// CurationLookupExists checks if the CurationLookup row exists. +func CurationLookupExists(ctx context.Context, exec boil.ContextExecutor, curatedBy string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"curation_lookup\" where \"curated_by\"=? 
limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, curatedBy) + } + row := exec.QueryRowContext(ctx, sql, curatedBy) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if curation_lookup exists") + } + + return exists, nil +} diff --git a/models/data_validity_lookup.go b/models/data_validity_lookup.go new file mode 100644 index 0000000..9b93587 --- /dev/null +++ b/models/data_validity_lookup.go @@ -0,0 +1,1140 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// DataValidityLookup is an object representing the database table. 
+type DataValidityLookup struct { + DataValidityComment string `boil:"data_validity_comment" json:"data_validity_comment" toml:"data_validity_comment" yaml:"data_validity_comment"` + Description null.String `boil:"description" json:"description,omitempty" toml:"description" yaml:"description,omitempty"` + + R *dataValidityLookupR `boil:"-" json:"-" toml:"-" yaml:"-"` + L dataValidityLookupL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var DataValidityLookupColumns = struct { + DataValidityComment string + Description string +}{ + DataValidityComment: "data_validity_comment", + Description: "description", +} + +var DataValidityLookupTableColumns = struct { + DataValidityComment string + Description string +}{ + DataValidityComment: "data_validity_lookup.data_validity_comment", + Description: "data_validity_lookup.description", +} + +// Generated where + +var DataValidityLookupWhere = struct { + DataValidityComment whereHelperstring + Description whereHelpernull_String +}{ + DataValidityComment: whereHelperstring{field: "\"data_validity_lookup\".\"data_validity_comment\""}, + Description: whereHelpernull_String{field: "\"data_validity_lookup\".\"description\""}, +} + +// DataValidityLookupRels is where relationship names are stored. +var DataValidityLookupRels = struct { + DataValidityCommentActivities string +}{ + DataValidityCommentActivities: "DataValidityCommentActivities", +} + +// dataValidityLookupR is where relationships are stored. 
+type dataValidityLookupR struct { + DataValidityCommentActivities ActivitySlice `boil:"DataValidityCommentActivities" json:"DataValidityCommentActivities" toml:"DataValidityCommentActivities" yaml:"DataValidityCommentActivities"` +} + +// NewStruct creates a new relationship struct +func (*dataValidityLookupR) NewStruct() *dataValidityLookupR { + return &dataValidityLookupR{} +} + +func (r *dataValidityLookupR) GetDataValidityCommentActivities() ActivitySlice { + if r == nil { + return nil + } + return r.DataValidityCommentActivities +} + +// dataValidityLookupL is where Load methods for each relationship are stored. +type dataValidityLookupL struct{} + +var ( + dataValidityLookupAllColumns = []string{"data_validity_comment", "description"} + dataValidityLookupColumnsWithoutDefault = []string{"data_validity_comment"} + dataValidityLookupColumnsWithDefault = []string{"description"} + dataValidityLookupPrimaryKeyColumns = []string{"data_validity_comment"} + dataValidityLookupGeneratedColumns = []string{} +) + +type ( + // DataValidityLookupSlice is an alias for a slice of pointers to DataValidityLookup. + // This should almost always be used instead of []DataValidityLookup. 
+ DataValidityLookupSlice []*DataValidityLookup + // DataValidityLookupHook is the signature for custom DataValidityLookup hook methods + DataValidityLookupHook func(context.Context, boil.ContextExecutor, *DataValidityLookup) error + + dataValidityLookupQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + dataValidityLookupType = reflect.TypeOf(&DataValidityLookup{}) + dataValidityLookupMapping = queries.MakeStructMapping(dataValidityLookupType) + dataValidityLookupPrimaryKeyMapping, _ = queries.BindMapping(dataValidityLookupType, dataValidityLookupMapping, dataValidityLookupPrimaryKeyColumns) + dataValidityLookupInsertCacheMut sync.RWMutex + dataValidityLookupInsertCache = make(map[string]insertCache) + dataValidityLookupUpdateCacheMut sync.RWMutex + dataValidityLookupUpdateCache = make(map[string]updateCache) + dataValidityLookupUpsertCacheMut sync.RWMutex + dataValidityLookupUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var dataValidityLookupAfterSelectHooks []DataValidityLookupHook + +var dataValidityLookupBeforeInsertHooks []DataValidityLookupHook +var dataValidityLookupAfterInsertHooks []DataValidityLookupHook + +var dataValidityLookupBeforeUpdateHooks []DataValidityLookupHook +var dataValidityLookupAfterUpdateHooks []DataValidityLookupHook + +var dataValidityLookupBeforeDeleteHooks []DataValidityLookupHook +var dataValidityLookupAfterDeleteHooks []DataValidityLookupHook + +var dataValidityLookupBeforeUpsertHooks []DataValidityLookupHook +var dataValidityLookupAfterUpsertHooks []DataValidityLookupHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *DataValidityLookup) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range dataValidityLookupAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *DataValidityLookup) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range dataValidityLookupBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *DataValidityLookup) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range dataValidityLookupAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *DataValidityLookup) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range dataValidityLookupBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *DataValidityLookup) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range dataValidityLookupAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *DataValidityLookup) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range dataValidityLookupBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *DataValidityLookup) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range dataValidityLookupAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *DataValidityLookup) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range dataValidityLookupBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *DataValidityLookup) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range dataValidityLookupAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddDataValidityLookupHook registers your hook function for all future operations. 
+func AddDataValidityLookupHook(hookPoint boil.HookPoint, dataValidityLookupHook DataValidityLookupHook) { + switch hookPoint { + case boil.AfterSelectHook: + dataValidityLookupAfterSelectHooks = append(dataValidityLookupAfterSelectHooks, dataValidityLookupHook) + case boil.BeforeInsertHook: + dataValidityLookupBeforeInsertHooks = append(dataValidityLookupBeforeInsertHooks, dataValidityLookupHook) + case boil.AfterInsertHook: + dataValidityLookupAfterInsertHooks = append(dataValidityLookupAfterInsertHooks, dataValidityLookupHook) + case boil.BeforeUpdateHook: + dataValidityLookupBeforeUpdateHooks = append(dataValidityLookupBeforeUpdateHooks, dataValidityLookupHook) + case boil.AfterUpdateHook: + dataValidityLookupAfterUpdateHooks = append(dataValidityLookupAfterUpdateHooks, dataValidityLookupHook) + case boil.BeforeDeleteHook: + dataValidityLookupBeforeDeleteHooks = append(dataValidityLookupBeforeDeleteHooks, dataValidityLookupHook) + case boil.AfterDeleteHook: + dataValidityLookupAfterDeleteHooks = append(dataValidityLookupAfterDeleteHooks, dataValidityLookupHook) + case boil.BeforeUpsertHook: + dataValidityLookupBeforeUpsertHooks = append(dataValidityLookupBeforeUpsertHooks, dataValidityLookupHook) + case boil.AfterUpsertHook: + dataValidityLookupAfterUpsertHooks = append(dataValidityLookupAfterUpsertHooks, dataValidityLookupHook) + } +} + +// One returns a single dataValidityLookup record from the query. +func (q dataValidityLookupQuery) One(ctx context.Context, exec boil.ContextExecutor) (*DataValidityLookup, error) { + o := &DataValidityLookup{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for data_validity_lookup") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all DataValidityLookup records from the query. 
+func (q dataValidityLookupQuery) All(ctx context.Context, exec boil.ContextExecutor) (DataValidityLookupSlice, error) { + var o []*DataValidityLookup + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to DataValidityLookup slice") + } + + if len(dataValidityLookupAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all DataValidityLookup records in the query. +func (q dataValidityLookupQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count data_validity_lookup rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q dataValidityLookupQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if data_validity_lookup exists") + } + + return count > 0, nil +} + +// DataValidityCommentActivities retrieves all the activity's Activities with an executor via data_validity_comment column. +func (o *DataValidityLookup) DataValidityCommentActivities(mods ...qm.QueryMod) activityQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"activities\".\"data_validity_comment\"=?", o.DataValidityComment), + ) + + return Activities(queryMods...) 
+} + +// LoadDataValidityCommentActivities allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (dataValidityLookupL) LoadDataValidityCommentActivities(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDataValidityLookup interface{}, mods queries.Applicator) error { + var slice []*DataValidityLookup + var object *DataValidityLookup + + if singular { + object = maybeDataValidityLookup.(*DataValidityLookup) + } else { + slice = *maybeDataValidityLookup.(*[]*DataValidityLookup) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &dataValidityLookupR{} + } + args = append(args, object.DataValidityComment) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &dataValidityLookupR{} + } + + for _, a := range args { + if queries.Equal(a, obj.DataValidityComment) { + continue Outer + } + } + + args = append(args, obj.DataValidityComment) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`activities`), + qm.WhereIn(`activities.data_validity_comment in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load activities") + } + + var resultSlice []*Activity + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice activities") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on activities") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activities") + } + + if len(activityAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + 
object.R.DataValidityCommentActivities = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.DataValidityCommentDataValidityLookup = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.DataValidityComment, foreign.DataValidityComment) { + local.R.DataValidityCommentActivities = append(local.R.DataValidityCommentActivities, foreign) + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.DataValidityCommentDataValidityLookup = local + break + } + } + } + + return nil +} + +// AddDataValidityCommentActivities adds the given related objects to the existing relationships +// of the data_validity_lookup, optionally inserting them as new records. +// Appends related to o.R.DataValidityCommentActivities. +// Sets related.R.DataValidityCommentDataValidityLookup appropriately. +func (o *DataValidityLookup) AddDataValidityCommentActivities(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Activity) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.DataValidityComment, o.DataValidityComment) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"activities\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"data_validity_comment"}), + strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns), + ) + values := []interface{}{o.DataValidityComment, rel.ActivityID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.DataValidityComment, o.DataValidityComment) + } + } + + if 
o.R == nil { + o.R = &dataValidityLookupR{ + DataValidityCommentActivities: related, + } + } else { + o.R.DataValidityCommentActivities = append(o.R.DataValidityCommentActivities, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &activityR{ + DataValidityCommentDataValidityLookup: o, + } + } else { + rel.R.DataValidityCommentDataValidityLookup = o + } + } + return nil +} + +// SetDataValidityCommentActivities removes all previously related items of the +// data_validity_lookup replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.DataValidityCommentDataValidityLookup's DataValidityCommentActivities accordingly. +// Replaces o.R.DataValidityCommentActivities with related. +// Sets related.R.DataValidityCommentDataValidityLookup's DataValidityCommentActivities accordingly. +func (o *DataValidityLookup) SetDataValidityCommentActivities(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Activity) error { + query := "update \"activities\" set \"data_validity_comment\" = null where \"data_validity_comment\" = ?" + values := []interface{}{o.DataValidityComment} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.DataValidityCommentActivities { + queries.SetScanner(&rel.DataValidityComment, nil) + if rel.R == nil { + continue + } + + rel.R.DataValidityCommentDataValidityLookup = nil + } + o.R.DataValidityCommentActivities = nil + } + + return o.AddDataValidityCommentActivities(ctx, exec, insert, related...) +} + +// RemoveDataValidityCommentActivities relationships from objects passed in. 
+// Removes related items from R.DataValidityCommentActivities (uses pointer comparison, removal does not keep order) +// Sets related.R.DataValidityCommentDataValidityLookup. +func (o *DataValidityLookup) RemoveDataValidityCommentActivities(ctx context.Context, exec boil.ContextExecutor, related ...*Activity) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.DataValidityComment, nil) + if rel.R != nil { + rel.R.DataValidityCommentDataValidityLookup = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("data_validity_comment")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.DataValidityCommentActivities { + if rel != ri { + continue + } + + ln := len(o.R.DataValidityCommentActivities) + if ln > 1 && i < ln-1 { + o.R.DataValidityCommentActivities[i] = o.R.DataValidityCommentActivities[ln-1] + } + o.R.DataValidityCommentActivities = o.R.DataValidityCommentActivities[:ln-1] + break + } + } + + return nil +} + +// DataValidityLookups retrieves all the records using an executor. +func DataValidityLookups(mods ...qm.QueryMod) dataValidityLookupQuery { + mods = append(mods, qm.From("\"data_validity_lookup\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"data_validity_lookup\".*"}) + } + + return dataValidityLookupQuery{q} +} + +// FindDataValidityLookup retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindDataValidityLookup(ctx context.Context, exec boil.ContextExecutor, dataValidityComment string, selectCols ...string) (*DataValidityLookup, error) { + dataValidityLookupObj := &DataValidityLookup{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"data_validity_lookup\" where \"data_validity_comment\"=?", sel, + ) + + q := queries.Raw(query, dataValidityComment) + + err := q.Bind(ctx, exec, dataValidityLookupObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from data_validity_lookup") + } + + if err = dataValidityLookupObj.doAfterSelectHooks(ctx, exec); err != nil { + return dataValidityLookupObj, err + } + + return dataValidityLookupObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *DataValidityLookup) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no data_validity_lookup provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(dataValidityLookupColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + dataValidityLookupInsertCacheMut.RLock() + cache, cached := dataValidityLookupInsertCache[key] + dataValidityLookupInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + dataValidityLookupAllColumns, + dataValidityLookupColumnsWithDefault, + dataValidityLookupColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(dataValidityLookupType, dataValidityLookupMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(dataValidityLookupType, dataValidityLookupMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"data_validity_lookup\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"data_validity_lookup\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, 
cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into data_validity_lookup") + } + + if !cached { + dataValidityLookupInsertCacheMut.Lock() + dataValidityLookupInsertCache[key] = cache + dataValidityLookupInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the DataValidityLookup. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *DataValidityLookup) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + dataValidityLookupUpdateCacheMut.RLock() + cache, cached := dataValidityLookupUpdateCache[key] + dataValidityLookupUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + dataValidityLookupAllColumns, + dataValidityLookupPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update data_validity_lookup, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"data_validity_lookup\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, dataValidityLookupPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(dataValidityLookupType, dataValidityLookupMapping, append(wl, dataValidityLookupPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + 
fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update data_validity_lookup row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for data_validity_lookup") + } + + if !cached { + dataValidityLookupUpdateCacheMut.Lock() + dataValidityLookupUpdateCache[key] = cache + dataValidityLookupUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q dataValidityLookupQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for data_validity_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for data_validity_lookup") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o DataValidityLookupSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), dataValidityLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"data_validity_lookup\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, dataValidityLookupPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in dataValidityLookup slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all dataValidityLookup") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *DataValidityLookup) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no data_validity_lookup provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(dataValidityLookupColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := 
buf.String() + strmangle.PutBuffer(buf) + + dataValidityLookupUpsertCacheMut.RLock() + cache, cached := dataValidityLookupUpsertCache[key] + dataValidityLookupUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + dataValidityLookupAllColumns, + dataValidityLookupColumnsWithDefault, + dataValidityLookupColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + dataValidityLookupAllColumns, + dataValidityLookupPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert data_validity_lookup, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(dataValidityLookupPrimaryKeyColumns)) + copy(conflict, dataValidityLookupPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"data_validity_lookup\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(dataValidityLookupType, dataValidityLookupMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(dataValidityLookupType, dataValidityLookupMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert data_validity_lookup") + } + + if !cached { + dataValidityLookupUpsertCacheMut.Lock() + dataValidityLookupUpsertCache[key] = cache + dataValidityLookupUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single DataValidityLookup record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *DataValidityLookup) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no DataValidityLookup provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), dataValidityLookupPrimaryKeyMapping) + sql := "DELETE FROM \"data_validity_lookup\" WHERE \"data_validity_comment\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from data_validity_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for data_validity_lookup") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q dataValidityLookupQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no dataValidityLookupQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from data_validity_lookup") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for data_validity_lookup") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o DataValidityLookupSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(dataValidityLookupBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), dataValidityLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"data_validity_lookup\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, dataValidityLookupPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from dataValidityLookup slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for data_validity_lookup") + } + + if len(dataValidityLookupAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *DataValidityLookup) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindDataValidityLookup(ctx, exec, o.DataValidityComment) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *DataValidityLookupSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := DataValidityLookupSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), dataValidityLookupPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"data_validity_lookup\".* FROM \"data_validity_lookup\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, dataValidityLookupPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in DataValidityLookupSlice") + } + + *o = slice + + return nil +} + +// DataValidityLookupExists checks if the DataValidityLookup row exists. 
+func DataValidityLookupExists(ctx context.Context, exec boil.ContextExecutor, dataValidityComment string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"data_validity_lookup\" where \"data_validity_comment\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, dataValidityComment) + } + row := exec.QueryRowContext(ctx, sql, dataValidityComment) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if data_validity_lookup exists") + } + + return exists, nil +} diff --git a/models/defined_daily_dose.go b/models/defined_daily_dose.go new file mode 100644 index 0000000..66825aa --- /dev/null +++ b/models/defined_daily_dose.go @@ -0,0 +1,1092 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/sqlboiler/v4/types" + "github.com/volatiletech/strmangle" +) + +// DefinedDailyDose is an object representing the database table. 
+type DefinedDailyDose struct { + AtcCode string `boil:"atc_code" json:"atc_code" toml:"atc_code" yaml:"atc_code"` + DDDUnits null.String `boil:"ddd_units" json:"ddd_units,omitempty" toml:"ddd_units" yaml:"ddd_units,omitempty"` + DDDAdmr null.String `boil:"ddd_admr" json:"ddd_admr,omitempty" toml:"ddd_admr" yaml:"ddd_admr,omitempty"` + DDDComment null.String `boil:"ddd_comment" json:"ddd_comment,omitempty" toml:"ddd_comment" yaml:"ddd_comment,omitempty"` + DDDID int64 `boil:"ddd_id" json:"ddd_id" toml:"ddd_id" yaml:"ddd_id"` + DDDValue types.NullDecimal `boil:"ddd_value" json:"ddd_value,omitempty" toml:"ddd_value" yaml:"ddd_value,omitempty"` + + R *definedDailyDoseR `boil:"-" json:"-" toml:"-" yaml:"-"` + L definedDailyDoseL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var DefinedDailyDoseColumns = struct { + AtcCode string + DDDUnits string + DDDAdmr string + DDDComment string + DDDID string + DDDValue string +}{ + AtcCode: "atc_code", + DDDUnits: "ddd_units", + DDDAdmr: "ddd_admr", + DDDComment: "ddd_comment", + DDDID: "ddd_id", + DDDValue: "ddd_value", +} + +var DefinedDailyDoseTableColumns = struct { + AtcCode string + DDDUnits string + DDDAdmr string + DDDComment string + DDDID string + DDDValue string +}{ + AtcCode: "defined_daily_dose.atc_code", + DDDUnits: "defined_daily_dose.ddd_units", + DDDAdmr: "defined_daily_dose.ddd_admr", + DDDComment: "defined_daily_dose.ddd_comment", + DDDID: "defined_daily_dose.ddd_id", + DDDValue: "defined_daily_dose.ddd_value", +} + +// Generated where + +var DefinedDailyDoseWhere = struct { + AtcCode whereHelperstring + DDDUnits whereHelpernull_String + DDDAdmr whereHelpernull_String + DDDComment whereHelpernull_String + DDDID whereHelperint64 + DDDValue whereHelpertypes_NullDecimal +}{ + AtcCode: whereHelperstring{field: "\"defined_daily_dose\".\"atc_code\""}, + DDDUnits: whereHelpernull_String{field: "\"defined_daily_dose\".\"ddd_units\""}, + DDDAdmr: whereHelpernull_String{field: "\"defined_daily_dose\".\"ddd_admr\""}, + 
DDDComment: whereHelpernull_String{field: "\"defined_daily_dose\".\"ddd_comment\""}, + DDDID: whereHelperint64{field: "\"defined_daily_dose\".\"ddd_id\""}, + DDDValue: whereHelpertypes_NullDecimal{field: "\"defined_daily_dose\".\"ddd_value\""}, +} + +// DefinedDailyDoseRels is where relationship names are stored. +var DefinedDailyDoseRels = struct { + AtcCodeAtcClassification string +}{ + AtcCodeAtcClassification: "AtcCodeAtcClassification", +} + +// definedDailyDoseR is where relationships are stored. +type definedDailyDoseR struct { + AtcCodeAtcClassification *AtcClassification `boil:"AtcCodeAtcClassification" json:"AtcCodeAtcClassification" toml:"AtcCodeAtcClassification" yaml:"AtcCodeAtcClassification"` +} + +// NewStruct creates a new relationship struct +func (*definedDailyDoseR) NewStruct() *definedDailyDoseR { + return &definedDailyDoseR{} +} + +func (r *definedDailyDoseR) GetAtcCodeAtcClassification() *AtcClassification { + if r == nil { + return nil + } + return r.AtcCodeAtcClassification +} + +// definedDailyDoseL is where Load methods for each relationship are stored. +type definedDailyDoseL struct{} + +var ( + definedDailyDoseAllColumns = []string{"atc_code", "ddd_units", "ddd_admr", "ddd_comment", "ddd_id", "ddd_value"} + definedDailyDoseColumnsWithoutDefault = []string{"atc_code", "ddd_id"} + definedDailyDoseColumnsWithDefault = []string{"ddd_units", "ddd_admr", "ddd_comment", "ddd_value"} + definedDailyDosePrimaryKeyColumns = []string{"ddd_id"} + definedDailyDoseGeneratedColumns = []string{} +) + +type ( + // DefinedDailyDoseSlice is an alias for a slice of pointers to DefinedDailyDose. + // This should almost always be used instead of []DefinedDailyDose. 
+ DefinedDailyDoseSlice []*DefinedDailyDose + // DefinedDailyDoseHook is the signature for custom DefinedDailyDose hook methods + DefinedDailyDoseHook func(context.Context, boil.ContextExecutor, *DefinedDailyDose) error + + definedDailyDoseQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + definedDailyDoseType = reflect.TypeOf(&DefinedDailyDose{}) + definedDailyDoseMapping = queries.MakeStructMapping(definedDailyDoseType) + definedDailyDosePrimaryKeyMapping, _ = queries.BindMapping(definedDailyDoseType, definedDailyDoseMapping, definedDailyDosePrimaryKeyColumns) + definedDailyDoseInsertCacheMut sync.RWMutex + definedDailyDoseInsertCache = make(map[string]insertCache) + definedDailyDoseUpdateCacheMut sync.RWMutex + definedDailyDoseUpdateCache = make(map[string]updateCache) + definedDailyDoseUpsertCacheMut sync.RWMutex + definedDailyDoseUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var definedDailyDoseAfterSelectHooks []DefinedDailyDoseHook + +var definedDailyDoseBeforeInsertHooks []DefinedDailyDoseHook +var definedDailyDoseAfterInsertHooks []DefinedDailyDoseHook + +var definedDailyDoseBeforeUpdateHooks []DefinedDailyDoseHook +var definedDailyDoseAfterUpdateHooks []DefinedDailyDoseHook + +var definedDailyDoseBeforeDeleteHooks []DefinedDailyDoseHook +var definedDailyDoseAfterDeleteHooks []DefinedDailyDoseHook + +var definedDailyDoseBeforeUpsertHooks []DefinedDailyDoseHook +var definedDailyDoseAfterUpsertHooks []DefinedDailyDoseHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *DefinedDailyDose) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range definedDailyDoseAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *DefinedDailyDose) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range definedDailyDoseBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *DefinedDailyDose) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range definedDailyDoseAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *DefinedDailyDose) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range definedDailyDoseBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *DefinedDailyDose) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range definedDailyDoseAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *DefinedDailyDose) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range definedDailyDoseBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *DefinedDailyDose) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range definedDailyDoseAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *DefinedDailyDose) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range definedDailyDoseBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *DefinedDailyDose) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range definedDailyDoseAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddDefinedDailyDoseHook registers your hook function for all future operations. 
+func AddDefinedDailyDoseHook(hookPoint boil.HookPoint, definedDailyDoseHook DefinedDailyDoseHook) { + switch hookPoint { + case boil.AfterSelectHook: + definedDailyDoseAfterSelectHooks = append(definedDailyDoseAfterSelectHooks, definedDailyDoseHook) + case boil.BeforeInsertHook: + definedDailyDoseBeforeInsertHooks = append(definedDailyDoseBeforeInsertHooks, definedDailyDoseHook) + case boil.AfterInsertHook: + definedDailyDoseAfterInsertHooks = append(definedDailyDoseAfterInsertHooks, definedDailyDoseHook) + case boil.BeforeUpdateHook: + definedDailyDoseBeforeUpdateHooks = append(definedDailyDoseBeforeUpdateHooks, definedDailyDoseHook) + case boil.AfterUpdateHook: + definedDailyDoseAfterUpdateHooks = append(definedDailyDoseAfterUpdateHooks, definedDailyDoseHook) + case boil.BeforeDeleteHook: + definedDailyDoseBeforeDeleteHooks = append(definedDailyDoseBeforeDeleteHooks, definedDailyDoseHook) + case boil.AfterDeleteHook: + definedDailyDoseAfterDeleteHooks = append(definedDailyDoseAfterDeleteHooks, definedDailyDoseHook) + case boil.BeforeUpsertHook: + definedDailyDoseBeforeUpsertHooks = append(definedDailyDoseBeforeUpsertHooks, definedDailyDoseHook) + case boil.AfterUpsertHook: + definedDailyDoseAfterUpsertHooks = append(definedDailyDoseAfterUpsertHooks, definedDailyDoseHook) + } +} + +// One returns a single definedDailyDose record from the query. +func (q definedDailyDoseQuery) One(ctx context.Context, exec boil.ContextExecutor) (*DefinedDailyDose, error) { + o := &DefinedDailyDose{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for defined_daily_dose") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all DefinedDailyDose records from the query. 
+func (q definedDailyDoseQuery) All(ctx context.Context, exec boil.ContextExecutor) (DefinedDailyDoseSlice, error) { + var o []*DefinedDailyDose + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to DefinedDailyDose slice") + } + + if len(definedDailyDoseAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all DefinedDailyDose records in the query. +func (q definedDailyDoseQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count defined_daily_dose rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q definedDailyDoseQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if defined_daily_dose exists") + } + + return count > 0, nil +} + +// AtcCodeAtcClassification pointed to by the foreign key. +func (o *DefinedDailyDose) AtcCodeAtcClassification(mods ...qm.QueryMod) atcClassificationQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"level5\" = ?", o.AtcCode), + } + + queryMods = append(queryMods, mods...) + + return AtcClassifications(queryMods...) +} + +// LoadAtcCodeAtcClassification allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (definedDailyDoseL) LoadAtcCodeAtcClassification(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDefinedDailyDose interface{}, mods queries.Applicator) error { + var slice []*DefinedDailyDose + var object *DefinedDailyDose + + if singular { + object = maybeDefinedDailyDose.(*DefinedDailyDose) + } else { + slice = *maybeDefinedDailyDose.(*[]*DefinedDailyDose) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &definedDailyDoseR{} + } + args = append(args, object.AtcCode) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &definedDailyDoseR{} + } + + for _, a := range args { + if a == obj.AtcCode { + continue Outer + } + } + + args = append(args, obj.AtcCode) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`atc_classification`), + qm.WhereIn(`atc_classification.level5 in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load AtcClassification") + } + + var resultSlice []*AtcClassification + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice AtcClassification") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for atc_classification") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for atc_classification") + } + + if len(definedDailyDoseAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.AtcCodeAtcClassification = foreign + if foreign.R == nil { + foreign.R = &atcClassificationR{} + } + foreign.R.AtcCodeDefinedDailyDoses 
= append(foreign.R.AtcCodeDefinedDailyDoses, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.AtcCode == foreign.Level5 { + local.R.AtcCodeAtcClassification = foreign + if foreign.R == nil { + foreign.R = &atcClassificationR{} + } + foreign.R.AtcCodeDefinedDailyDoses = append(foreign.R.AtcCodeDefinedDailyDoses, local) + break + } + } + } + + return nil +} + +// SetAtcCodeAtcClassification of the definedDailyDose to the related item. +// Sets o.R.AtcCodeAtcClassification to related. +// Adds o to related.R.AtcCodeDefinedDailyDoses. +func (o *DefinedDailyDose) SetAtcCodeAtcClassification(ctx context.Context, exec boil.ContextExecutor, insert bool, related *AtcClassification) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"defined_daily_dose\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"atc_code"}), + strmangle.WhereClause("\"", "\"", 0, definedDailyDosePrimaryKeyColumns), + ) + values := []interface{}{related.Level5, o.DDDID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.AtcCode = related.Level5 + if o.R == nil { + o.R = &definedDailyDoseR{ + AtcCodeAtcClassification: related, + } + } else { + o.R.AtcCodeAtcClassification = related + } + + if related.R == nil { + related.R = &atcClassificationR{ + AtcCodeDefinedDailyDoses: DefinedDailyDoseSlice{o}, + } + } else { + related.R.AtcCodeDefinedDailyDoses = append(related.R.AtcCodeDefinedDailyDoses, o) + } + + return nil +} + +// DefinedDailyDoses retrieves all the records using an executor. 
+func DefinedDailyDoses(mods ...qm.QueryMod) definedDailyDoseQuery { + mods = append(mods, qm.From("\"defined_daily_dose\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"defined_daily_dose\".*"}) + } + + return definedDailyDoseQuery{q} +} + +// FindDefinedDailyDose retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindDefinedDailyDose(ctx context.Context, exec boil.ContextExecutor, dDDID int64, selectCols ...string) (*DefinedDailyDose, error) { + definedDailyDoseObj := &DefinedDailyDose{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"defined_daily_dose\" where \"ddd_id\"=?", sel, + ) + + q := queries.Raw(query, dDDID) + + err := q.Bind(ctx, exec, definedDailyDoseObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from defined_daily_dose") + } + + if err = definedDailyDoseObj.doAfterSelectHooks(ctx, exec); err != nil { + return definedDailyDoseObj, err + } + + return definedDailyDoseObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *DefinedDailyDose) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no defined_daily_dose provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(definedDailyDoseColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + definedDailyDoseInsertCacheMut.RLock() + cache, cached := definedDailyDoseInsertCache[key] + definedDailyDoseInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + definedDailyDoseAllColumns, + definedDailyDoseColumnsWithDefault, + definedDailyDoseColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(definedDailyDoseType, definedDailyDoseMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(definedDailyDoseType, definedDailyDoseMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"defined_daily_dose\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"defined_daily_dose\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) 
+ } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into defined_daily_dose") + } + + if !cached { + definedDailyDoseInsertCacheMut.Lock() + definedDailyDoseInsertCache[key] = cache + definedDailyDoseInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the DefinedDailyDose. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *DefinedDailyDose) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + definedDailyDoseUpdateCacheMut.RLock() + cache, cached := definedDailyDoseUpdateCache[key] + definedDailyDoseUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + definedDailyDoseAllColumns, + definedDailyDosePrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update defined_daily_dose, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"defined_daily_dose\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, definedDailyDosePrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(definedDailyDoseType, definedDailyDoseMapping, append(wl, definedDailyDosePrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var 
result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update defined_daily_dose row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for defined_daily_dose") + } + + if !cached { + definedDailyDoseUpdateCacheMut.Lock() + definedDailyDoseUpdateCache[key] = cache + definedDailyDoseUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q definedDailyDoseQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for defined_daily_dose") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for defined_daily_dose") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o DefinedDailyDoseSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), definedDailyDosePrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"defined_daily_dose\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, definedDailyDosePrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in definedDailyDose slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all definedDailyDose") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *DefinedDailyDose) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no defined_daily_dose provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(definedDailyDoseColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + 
strmangle.PutBuffer(buf) + + definedDailyDoseUpsertCacheMut.RLock() + cache, cached := definedDailyDoseUpsertCache[key] + definedDailyDoseUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + definedDailyDoseAllColumns, + definedDailyDoseColumnsWithDefault, + definedDailyDoseColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + definedDailyDoseAllColumns, + definedDailyDosePrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert defined_daily_dose, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(definedDailyDosePrimaryKeyColumns)) + copy(conflict, definedDailyDosePrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"defined_daily_dose\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(definedDailyDoseType, definedDailyDoseMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(definedDailyDoseType, definedDailyDoseMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert defined_daily_dose") + } + + if !cached { + definedDailyDoseUpsertCacheMut.Lock() + definedDailyDoseUpsertCache[key] = cache + definedDailyDoseUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single DefinedDailyDose record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *DefinedDailyDose) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no DefinedDailyDose provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), definedDailyDosePrimaryKeyMapping) + sql := "DELETE FROM \"defined_daily_dose\" WHERE \"ddd_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from defined_daily_dose") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for defined_daily_dose") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q definedDailyDoseQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no definedDailyDoseQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from defined_daily_dose") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for defined_daily_dose") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o DefinedDailyDoseSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(definedDailyDoseBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), definedDailyDosePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"defined_daily_dose\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, definedDailyDosePrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from definedDailyDose slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for defined_daily_dose") + } + + if len(definedDailyDoseAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *DefinedDailyDose) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindDefinedDailyDose(ctx, exec, o.DDDID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *DefinedDailyDoseSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := DefinedDailyDoseSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), definedDailyDosePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"defined_daily_dose\".* FROM \"defined_daily_dose\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, definedDailyDosePrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in DefinedDailyDoseSlice") + } + + *o = slice + + return nil +} + +// DefinedDailyDoseExists checks if the DefinedDailyDose row exists. 
+func DefinedDailyDoseExists(ctx context.Context, exec boil.ContextExecutor, dDDID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"defined_daily_dose\" where \"ddd_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, dDDID) + } + row := exec.QueryRowContext(ctx, sql, dDDID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if defined_daily_dose exists") + } + + return exists, nil +} diff --git a/models/docs.go b/models/docs.go new file mode 100644 index 0000000..fee7f82 --- /dev/null +++ b/models/docs.go @@ -0,0 +1,1939 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Doc is an object representing the database table. 
+type Doc struct { + DocID int64 `boil:"doc_id" json:"doc_id" toml:"doc_id" yaml:"doc_id"` + Journal null.String `boil:"journal" json:"journal,omitempty" toml:"journal" yaml:"journal,omitempty"` + Year null.Int64 `boil:"year" json:"year,omitempty" toml:"year" yaml:"year,omitempty"` + Volume null.String `boil:"volume" json:"volume,omitempty" toml:"volume" yaml:"volume,omitempty"` + Issue null.String `boil:"issue" json:"issue,omitempty" toml:"issue" yaml:"issue,omitempty"` + FirstPage null.String `boil:"first_page" json:"first_page,omitempty" toml:"first_page" yaml:"first_page,omitempty"` + LastPage null.String `boil:"last_page" json:"last_page,omitempty" toml:"last_page" yaml:"last_page,omitempty"` + PubmedID null.Int64 `boil:"pubmed_id" json:"pubmed_id,omitempty" toml:"pubmed_id" yaml:"pubmed_id,omitempty"` + Doi null.String `boil:"doi" json:"doi,omitempty" toml:"doi" yaml:"doi,omitempty"` + ChemblID string `boil:"chembl_id" json:"chembl_id" toml:"chembl_id" yaml:"chembl_id"` + Title null.String `boil:"title" json:"title,omitempty" toml:"title" yaml:"title,omitempty"` + DocType string `boil:"doc_type" json:"doc_type" toml:"doc_type" yaml:"doc_type"` + Authors null.String `boil:"authors" json:"authors,omitempty" toml:"authors" yaml:"authors,omitempty"` + Abstract null.String `boil:"abstract" json:"abstract,omitempty" toml:"abstract" yaml:"abstract,omitempty"` + PatentID null.String `boil:"patent_id" json:"patent_id,omitempty" toml:"patent_id" yaml:"patent_id,omitempty"` + Ridx string `boil:"ridx" json:"ridx" toml:"ridx" yaml:"ridx"` + SRCID int64 `boil:"src_id" json:"src_id" toml:"src_id" yaml:"src_id"` + + R *docR `boil:"-" json:"-" toml:"-" yaml:"-"` + L docL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var DocColumns = struct { + DocID string + Journal string + Year string + Volume string + Issue string + FirstPage string + LastPage string + PubmedID string + Doi string + ChemblID string + Title string + DocType string + Authors string + Abstract string + PatentID 
string + Ridx string + SRCID string +}{ + DocID: "doc_id", + Journal: "journal", + Year: "year", + Volume: "volume", + Issue: "issue", + FirstPage: "first_page", + LastPage: "last_page", + PubmedID: "pubmed_id", + Doi: "doi", + ChemblID: "chembl_id", + Title: "title", + DocType: "doc_type", + Authors: "authors", + Abstract: "abstract", + PatentID: "patent_id", + Ridx: "ridx", + SRCID: "src_id", +} + +var DocTableColumns = struct { + DocID string + Journal string + Year string + Volume string + Issue string + FirstPage string + LastPage string + PubmedID string + Doi string + ChemblID string + Title string + DocType string + Authors string + Abstract string + PatentID string + Ridx string + SRCID string +}{ + DocID: "docs.doc_id", + Journal: "docs.journal", + Year: "docs.year", + Volume: "docs.volume", + Issue: "docs.issue", + FirstPage: "docs.first_page", + LastPage: "docs.last_page", + PubmedID: "docs.pubmed_id", + Doi: "docs.doi", + ChemblID: "docs.chembl_id", + Title: "docs.title", + DocType: "docs.doc_type", + Authors: "docs.authors", + Abstract: "docs.abstract", + PatentID: "docs.patent_id", + Ridx: "docs.ridx", + SRCID: "docs.src_id", +} + +// Generated where + +var DocWhere = struct { + DocID whereHelperint64 + Journal whereHelpernull_String + Year whereHelpernull_Int64 + Volume whereHelpernull_String + Issue whereHelpernull_String + FirstPage whereHelpernull_String + LastPage whereHelpernull_String + PubmedID whereHelpernull_Int64 + Doi whereHelpernull_String + ChemblID whereHelperstring + Title whereHelpernull_String + DocType whereHelperstring + Authors whereHelpernull_String + Abstract whereHelpernull_String + PatentID whereHelpernull_String + Ridx whereHelperstring + SRCID whereHelperint64 +}{ + DocID: whereHelperint64{field: "\"docs\".\"doc_id\""}, + Journal: whereHelpernull_String{field: "\"docs\".\"journal\""}, + Year: whereHelpernull_Int64{field: "\"docs\".\"year\""}, + Volume: whereHelpernull_String{field: "\"docs\".\"volume\""}, + Issue: 
whereHelpernull_String{field: "\"docs\".\"issue\""}, + FirstPage: whereHelpernull_String{field: "\"docs\".\"first_page\""}, + LastPage: whereHelpernull_String{field: "\"docs\".\"last_page\""}, + PubmedID: whereHelpernull_Int64{field: "\"docs\".\"pubmed_id\""}, + Doi: whereHelpernull_String{field: "\"docs\".\"doi\""}, + ChemblID: whereHelperstring{field: "\"docs\".\"chembl_id\""}, + Title: whereHelpernull_String{field: "\"docs\".\"title\""}, + DocType: whereHelperstring{field: "\"docs\".\"doc_type\""}, + Authors: whereHelpernull_String{field: "\"docs\".\"authors\""}, + Abstract: whereHelpernull_String{field: "\"docs\".\"abstract\""}, + PatentID: whereHelpernull_String{field: "\"docs\".\"patent_id\""}, + Ridx: whereHelperstring{field: "\"docs\".\"ridx\""}, + SRCID: whereHelperint64{field: "\"docs\".\"src_id\""}, +} + +// DocRels is where relationship names are stored. +var DocRels = struct { + SRC string + Chembl string + Activities string + Assays string + CompoundRecords string +}{ + SRC: "SRC", + Chembl: "Chembl", + Activities: "Activities", + Assays: "Assays", + CompoundRecords: "CompoundRecords", +} + +// docR is where relationships are stored. 
+type docR struct { + SRC *Source `boil:"SRC" json:"SRC" toml:"SRC" yaml:"SRC"` + Chembl *ChemblIDLookup `boil:"Chembl" json:"Chembl" toml:"Chembl" yaml:"Chembl"` + Activities ActivitySlice `boil:"Activities" json:"Activities" toml:"Activities" yaml:"Activities"` + Assays AssaySlice `boil:"Assays" json:"Assays" toml:"Assays" yaml:"Assays"` + CompoundRecords CompoundRecordSlice `boil:"CompoundRecords" json:"CompoundRecords" toml:"CompoundRecords" yaml:"CompoundRecords"` +} + +// NewStruct creates a new relationship struct +func (*docR) NewStruct() *docR { + return &docR{} +} + +func (r *docR) GetSRC() *Source { + if r == nil { + return nil + } + return r.SRC +} + +func (r *docR) GetChembl() *ChemblIDLookup { + if r == nil { + return nil + } + return r.Chembl +} + +func (r *docR) GetActivities() ActivitySlice { + if r == nil { + return nil + } + return r.Activities +} + +func (r *docR) GetAssays() AssaySlice { + if r == nil { + return nil + } + return r.Assays +} + +func (r *docR) GetCompoundRecords() CompoundRecordSlice { + if r == nil { + return nil + } + return r.CompoundRecords +} + +// docL is where Load methods for each relationship are stored. +type docL struct{} + +var ( + docAllColumns = []string{"doc_id", "journal", "year", "volume", "issue", "first_page", "last_page", "pubmed_id", "doi", "chembl_id", "title", "doc_type", "authors", "abstract", "patent_id", "ridx", "src_id"} + docColumnsWithoutDefault = []string{"doc_id", "chembl_id", "doc_type", "ridx", "src_id"} + docColumnsWithDefault = []string{"journal", "year", "volume", "issue", "first_page", "last_page", "pubmed_id", "doi", "title", "authors", "abstract", "patent_id"} + docPrimaryKeyColumns = []string{"doc_id"} + docGeneratedColumns = []string{} +) + +type ( + // DocSlice is an alias for a slice of pointers to Doc. + // This should almost always be used instead of []Doc. 
+ DocSlice []*Doc + // DocHook is the signature for custom Doc hook methods + DocHook func(context.Context, boil.ContextExecutor, *Doc) error + + docQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + docType = reflect.TypeOf(&Doc{}) + docMapping = queries.MakeStructMapping(docType) + docPrimaryKeyMapping, _ = queries.BindMapping(docType, docMapping, docPrimaryKeyColumns) + docInsertCacheMut sync.RWMutex + docInsertCache = make(map[string]insertCache) + docUpdateCacheMut sync.RWMutex + docUpdateCache = make(map[string]updateCache) + docUpsertCacheMut sync.RWMutex + docUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var docAfterSelectHooks []DocHook + +var docBeforeInsertHooks []DocHook +var docAfterInsertHooks []DocHook + +var docBeforeUpdateHooks []DocHook +var docAfterUpdateHooks []DocHook + +var docBeforeDeleteHooks []DocHook +var docAfterDeleteHooks []DocHook + +var docBeforeUpsertHooks []DocHook +var docAfterUpsertHooks []DocHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *Doc) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range docAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *Doc) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range docBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. 
+func (o *Doc) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range docAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *Doc) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range docBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Doc) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range docAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Doc) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range docBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *Doc) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range docAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. 
+func (o *Doc) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range docBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *Doc) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range docAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddDocHook registers your hook function for all future operations. +func AddDocHook(hookPoint boil.HookPoint, docHook DocHook) { + switch hookPoint { + case boil.AfterSelectHook: + docAfterSelectHooks = append(docAfterSelectHooks, docHook) + case boil.BeforeInsertHook: + docBeforeInsertHooks = append(docBeforeInsertHooks, docHook) + case boil.AfterInsertHook: + docAfterInsertHooks = append(docAfterInsertHooks, docHook) + case boil.BeforeUpdateHook: + docBeforeUpdateHooks = append(docBeforeUpdateHooks, docHook) + case boil.AfterUpdateHook: + docAfterUpdateHooks = append(docAfterUpdateHooks, docHook) + case boil.BeforeDeleteHook: + docBeforeDeleteHooks = append(docBeforeDeleteHooks, docHook) + case boil.AfterDeleteHook: + docAfterDeleteHooks = append(docAfterDeleteHooks, docHook) + case boil.BeforeUpsertHook: + docBeforeUpsertHooks = append(docBeforeUpsertHooks, docHook) + case boil.AfterUpsertHook: + docAfterUpsertHooks = append(docAfterUpsertHooks, docHook) + } +} + +// One returns a single doc record from the query. 
+func (q docQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Doc, error) { + o := &Doc{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for docs") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Doc records from the query. +func (q docQuery) All(ctx context.Context, exec boil.ContextExecutor) (DocSlice, error) { + var o []*Doc + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to Doc slice") + } + + if len(docAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Doc records in the query. +func (q docQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count docs rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q docQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if docs exists") + } + + return count > 0, nil +} + +// SRC pointed to by the foreign key. +func (o *Doc) SRC(mods ...qm.QueryMod) sourceQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"src_id\" = ?", o.SRCID), + } + + queryMods = append(queryMods, mods...) 
+ + return Sources(queryMods...) +} + +// Chembl pointed to by the foreign key. +func (o *Doc) Chembl(mods ...qm.QueryMod) chemblIDLookupQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"chembl_id\" = ?", o.ChemblID), + } + + queryMods = append(queryMods, mods...) + + return ChemblIDLookups(queryMods...) +} + +// Activities retrieves all the activity's Activities with an executor. +func (o *Doc) Activities(mods ...qm.QueryMod) activityQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"activities\".\"doc_id\"=?", o.DocID), + ) + + return Activities(queryMods...) +} + +// Assays retrieves all the assay's Assays with an executor. +func (o *Doc) Assays(mods ...qm.QueryMod) assayQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"assays\".\"doc_id\"=?", o.DocID), + ) + + return Assays(queryMods...) +} + +// CompoundRecords retrieves all the compound_record's CompoundRecords with an executor. +func (o *Doc) CompoundRecords(mods ...qm.QueryMod) compoundRecordQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"compound_records\".\"doc_id\"=?", o.DocID), + ) + + return CompoundRecords(queryMods...) +} + +// LoadSRC allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (docL) LoadSRC(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDoc interface{}, mods queries.Applicator) error { + var slice []*Doc + var object *Doc + + if singular { + object = maybeDoc.(*Doc) + } else { + slice = *maybeDoc.(*[]*Doc) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &docR{} + } + args = append(args, object.SRCID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &docR{} + } + + for _, a := range args { + if a == obj.SRCID { + continue Outer + } + } + + args = append(args, obj.SRCID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`source`), + qm.WhereIn(`source.src_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Source") + } + + var resultSlice []*Source + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Source") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for source") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for source") + } + + if len(docAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.SRC = foreign + if foreign.R == nil { + foreign.R = &sourceR{} + } + foreign.R.SRCDocs = append(foreign.R.SRCDocs, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.SRCID == foreign.SRCID { + local.R.SRC = foreign + if foreign.R == nil { + foreign.R = &sourceR{} + } + foreign.R.SRCDocs = append(foreign.R.SRCDocs, local) + 
break + } + } + } + + return nil +} + +// LoadChembl allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (docL) LoadChembl(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDoc interface{}, mods queries.Applicator) error { + var slice []*Doc + var object *Doc + + if singular { + object = maybeDoc.(*Doc) + } else { + slice = *maybeDoc.(*[]*Doc) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &docR{} + } + args = append(args, object.ChemblID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &docR{} + } + + for _, a := range args { + if a == obj.ChemblID { + continue Outer + } + } + + args = append(args, obj.ChemblID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`chembl_id_lookup`), + qm.WhereIn(`chembl_id_lookup.chembl_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ChemblIDLookup") + } + + var resultSlice []*ChemblIDLookup + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice ChemblIDLookup") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for chembl_id_lookup") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for chembl_id_lookup") + } + + if len(docAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Chembl = foreign + if foreign.R == nil { + foreign.R = &chemblIDLookupR{} + } + foreign.R.ChemblDoc = object + return nil + } + + for _, 
local := range slice { + for _, foreign := range resultSlice { + if local.ChemblID == foreign.ChemblID { + local.R.Chembl = foreign + if foreign.R == nil { + foreign.R = &chemblIDLookupR{} + } + foreign.R.ChemblDoc = local + break + } + } + } + + return nil +} + +// LoadActivities allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (docL) LoadActivities(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDoc interface{}, mods queries.Applicator) error { + var slice []*Doc + var object *Doc + + if singular { + object = maybeDoc.(*Doc) + } else { + slice = *maybeDoc.(*[]*Doc) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &docR{} + } + args = append(args, object.DocID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &docR{} + } + + for _, a := range args { + if queries.Equal(a, obj.DocID) { + continue Outer + } + } + + args = append(args, obj.DocID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`activities`), + qm.WhereIn(`activities.doc_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load activities") + } + + var resultSlice []*Activity + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice activities") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on activities") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activities") + } + + if len(activityAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.Activities = 
resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.Doc = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.DocID, foreign.DocID) { + local.R.Activities = append(local.R.Activities, foreign) + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.Doc = local + break + } + } + } + + return nil +} + +// LoadAssays allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (docL) LoadAssays(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDoc interface{}, mods queries.Applicator) error { + var slice []*Doc + var object *Doc + + if singular { + object = maybeDoc.(*Doc) + } else { + slice = *maybeDoc.(*[]*Doc) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &docR{} + } + args = append(args, object.DocID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &docR{} + } + + for _, a := range args { + if a == obj.DocID { + continue Outer + } + } + + args = append(args, obj.DocID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assays`), + qm.WhereIn(`assays.doc_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load assays") + } + + var resultSlice []*Assay + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice assays") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on assays") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays") + } + + if len(assayAfterSelectHooks) != 0 { + for _, obj := 
range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.Assays = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.Doc = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.DocID == foreign.DocID { + local.R.Assays = append(local.R.Assays, foreign) + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.Doc = local + break + } + } + } + + return nil +} + +// LoadCompoundRecords allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (docL) LoadCompoundRecords(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDoc interface{}, mods queries.Applicator) error { + var slice []*Doc + var object *Doc + + if singular { + object = maybeDoc.(*Doc) + } else { + slice = *maybeDoc.(*[]*Doc) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &docR{} + } + args = append(args, object.DocID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &docR{} + } + + for _, a := range args { + if a == obj.DocID { + continue Outer + } + } + + args = append(args, obj.DocID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`compound_records`), + qm.WhereIn(`compound_records.doc_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load compound_records") + } + + var resultSlice []*CompoundRecord + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice compound_records") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on compound_records") + } + if err = 
results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound_records") + } + + if len(compoundRecordAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.CompoundRecords = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.Doc = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.DocID == foreign.DocID { + local.R.CompoundRecords = append(local.R.CompoundRecords, foreign) + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.Doc = local + break + } + } + } + + return nil +} + +// SetSRC of the doc to the related item. +// Sets o.R.SRC to related. +// Adds o to related.R.SRCDocs. +func (o *Doc) SetSRC(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Source) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"docs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"src_id"}), + strmangle.WhereClause("\"", "\"", 0, docPrimaryKeyColumns), + ) + values := []interface{}{related.SRCID, o.DocID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.SRCID = related.SRCID + if o.R == nil { + o.R = &docR{ + SRC: related, + } + } else { + o.R.SRC = related + } + + if related.R == nil { + related.R = &sourceR{ + SRCDocs: DocSlice{o}, + } + } else { + related.R.SRCDocs = append(related.R.SRCDocs, o) + } + + return nil +} + +// 
SetChembl of the doc to the related item. +// Sets o.R.Chembl to related. +// Adds o to related.R.ChemblDoc. +func (o *Doc) SetChembl(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ChemblIDLookup) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"docs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"chembl_id"}), + strmangle.WhereClause("\"", "\"", 0, docPrimaryKeyColumns), + ) + values := []interface{}{related.ChemblID, o.DocID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.ChemblID = related.ChemblID + if o.R == nil { + o.R = &docR{ + Chembl: related, + } + } else { + o.R.Chembl = related + } + + if related.R == nil { + related.R = &chemblIDLookupR{ + ChemblDoc: o, + } + } else { + related.R.ChemblDoc = o + } + + return nil +} + +// AddActivities adds the given related objects to the existing relationships +// of the doc, optionally inserting them as new records. +// Appends related to o.R.Activities. +// Sets related.R.Doc appropriately. 
+func (o *Doc) AddActivities(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Activity) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.DocID, o.DocID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"activities\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"doc_id"}), + strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns), + ) + values := []interface{}{o.DocID, rel.ActivityID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.DocID, o.DocID) + } + } + + if o.R == nil { + o.R = &docR{ + Activities: related, + } + } else { + o.R.Activities = append(o.R.Activities, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &activityR{ + Doc: o, + } + } else { + rel.R.Doc = o + } + } + return nil +} + +// SetActivities removes all previously related items of the +// doc replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.Doc's Activities accordingly. +// Replaces o.R.Activities with related. +// Sets related.R.Doc's Activities accordingly. +func (o *Doc) SetActivities(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Activity) error { + query := "update \"activities\" set \"doc_id\" = null where \"doc_id\" = ?" + values := []interface{}{o.DocID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) 
+ if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.Activities { + queries.SetScanner(&rel.DocID, nil) + if rel.R == nil { + continue + } + + rel.R.Doc = nil + } + o.R.Activities = nil + } + + return o.AddActivities(ctx, exec, insert, related...) +} + +// RemoveActivities relationships from objects passed in. +// Removes related items from R.Activities (uses pointer comparison, removal does not keep order) +// Sets related.R.Doc. +func (o *Doc) RemoveActivities(ctx context.Context, exec boil.ContextExecutor, related ...*Activity) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.DocID, nil) + if rel.R != nil { + rel.R.Doc = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("doc_id")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.Activities { + if rel != ri { + continue + } + + ln := len(o.R.Activities) + if ln > 1 && i < ln-1 { + o.R.Activities[i] = o.R.Activities[ln-1] + } + o.R.Activities = o.R.Activities[:ln-1] + break + } + } + + return nil +} + +// AddAssays adds the given related objects to the existing relationships +// of the doc, optionally inserting them as new records. +// Appends related to o.R.Assays. +// Sets related.R.Doc appropriately. 
func (o *Doc) AddAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error {
	var err error
	for _, rel := range related {
		if insert {
			// Point the child at this doc before inserting so the FK is set on insert.
			rel.DocID = o.DocID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: repoint its doc_id in the database by primary key.
			updateQuery := fmt.Sprintf(
				"UPDATE \"assays\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"doc_id"}),
				strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns),
			)
			values := []interface{}{o.DocID, rel.AssayID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			// Keep the in-memory struct in sync with the row just updated.
			rel.DocID = o.DocID
		}
	}

	// Cache the relationship on both sides (o.R.Assays and each rel.R.Doc).
	if o.R == nil {
		o.R = &docR{
			Assays: related,
		}
	} else {
		o.R.Assays = append(o.R.Assays, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &assayR{
				Doc: o,
			}
		} else {
			rel.R.Doc = o
		}
	}
	return nil
}

// AddCompoundRecords adds the given related objects to the existing relationships
// of the doc, optionally inserting them as new records.
// Appends related to o.R.CompoundRecords.
// Sets related.R.Doc appropriately.
func (o *Doc) AddCompoundRecords(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*CompoundRecord) error {
	var err error
	for _, rel := range related {
		if insert {
			// Point the child at this doc before inserting so the FK is set on insert.
			rel.DocID = o.DocID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: repoint its doc_id in the database by primary key.
			updateQuery := fmt.Sprintf(
				"UPDATE \"compound_records\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"doc_id"}),
				strmangle.WhereClause("\"", "\"", 0, compoundRecordPrimaryKeyColumns),
			)
			values := []interface{}{o.DocID, rel.RecordID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			// Keep the in-memory struct in sync with the row just updated.
			rel.DocID = o.DocID
		}
	}

	// Cache the relationship on both sides (o.R.CompoundRecords and each rel.R.Doc).
	if o.R == nil {
		o.R = &docR{
			CompoundRecords: related,
		}
	} else {
		o.R.CompoundRecords = append(o.R.CompoundRecords, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &compoundRecordR{
				Doc: o,
			}
		} else {
			rel.R.Doc = o
		}
	}
	return nil
}

// Docs retrieves all the records using an executor.
func Docs(mods ...qm.QueryMod) docQuery {
	mods = append(mods, qm.From("\"docs\""))
	q := NewQuery(mods...)
	// Default to selecting every column unless the caller supplied a qm.Select.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"docs\".*"})
	}

	return docQuery{q}
}

// FindDoc retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindDoc(ctx context.Context, exec boil.ContextExecutor, docID int64, selectCols ...string) (*Doc, error) {
	docObj := &Doc{}

	sel := "*"
	if len(selectCols) > 0 {
		// Quote the requested columns with the dialect's identifier quotes.
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"docs\" where \"doc_id\"=?", sel,
	)

	q := queries.Raw(query, docID)

	err := q.Bind(ctx, exec, docObj)
	if err != nil {
		// Surface sql.ErrNoRows unwrapped so callers can test for it directly.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from docs")
	}

	if err = docObj.doAfterSelectHooks(ctx, exec); err != nil {
		return docObj, err
	}

	return docObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *Doc) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no docs provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	// Columns left at their zero value that have DB defaults are excluded from
	// the insert list so the database fills them in.
	nzDefaults := queries.NonZeroDefaultSet(docColumnsWithDefault, o)

	// Built statements are cached per (column set, non-zero defaults) key,
	// guarded by an RWMutex.
	key := makeCacheKey(columns, nzDefaults)
	docInsertCacheMut.RLock()
	cache, cached := docInsertCache[key]
	docInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			docAllColumns,
			docColumnsWithDefault,
			docColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(docType, docMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(docType, docMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"docs\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"docs\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	// When columns are RETURNING'd, scan them back into the struct.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into docs")
	}

	if !cached {
		docInsertCacheMut.Lock()
		docInsertCache[key] = cache
		docInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the Doc.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *Doc) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	// Built statements are cached per column set, guarded by an RWMutex.
	key := makeCacheKey(columns, nil)
	docUpdateCacheMut.RLock()
	cache, cached := docUpdateCache[key]
	docUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			docAllColumns,
			docPrimaryKeyColumns,
		)

		// created_at is never updated unless the caller explicitly whitelists it.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update docs, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"docs\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, docPrimaryKeyColumns),
		)
		// Value mapping covers SET columns first, then the primary key for WHERE.
		cache.valueMapping, err = queries.BindMapping(docType, docMapping, append(wl, docPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update docs row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for docs")
	}

	if !cached {
		docUpdateCacheMut.Lock()
		docUpdateCache[key] = cache
		docUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q docQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for docs")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for docs")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o DocSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	// Args are the SET values first (map iteration order is arbitrary but
	// colNames and args are filled in lockstep), then every row's primary key.
	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), docPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"docs\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, docPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in doc slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all doc")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
+// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Doc) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no docs provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(docColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + docUpsertCacheMut.RLock() + cache, cached := docUpsertCache[key] + docUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + docAllColumns, + docColumnsWithDefault, + docColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + docAllColumns, + docPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert docs, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(docPrimaryKeyColumns)) + copy(conflict, docPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"docs\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(docType, docMapping, insert) + if 
err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(docType, docMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert docs") + } + + if !cached { + docUpsertCacheMut.Lock() + docUpsertCache[key] = cache + docUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Doc record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *Doc) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no Doc provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), docPrimaryKeyMapping) + sql := "DELETE FROM \"docs\" WHERE \"doc_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from docs") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for docs") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q docQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no docQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from docs") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for docs") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o DocSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(docBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), docPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"docs\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, docPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from doc slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for docs") + } + + if len(docAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *Doc) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindDoc(ctx, exec, o.DocID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *DocSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := DocSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), docPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"docs\".* FROM \"docs\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, docPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in DocSlice") + } + + *o = slice + + return nil +} + +// DocExists checks if the Doc row exists. +func DocExists(ctx context.Context, exec boil.ContextExecutor, docID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"docs\" where \"doc_id\"=? 
limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, docID) + } + row := exec.QueryRowContext(ctx, sql, docID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if docs exists") + } + + return exists, nil +} diff --git a/models/domains.go b/models/domains.go new file mode 100644 index 0000000..f380ce3 --- /dev/null +++ b/models/domains.go @@ -0,0 +1,1410 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Domain is an object representing the database table. 
// NOTE(review): SQLBoiler-generated model for the "domains" table; hand edits
// here are lost on regeneration.
type Domain struct {
	DomainID          int64       `boil:"domain_id" json:"domain_id" toml:"domain_id" yaml:"domain_id"`
	DomainType        string      `boil:"domain_type" json:"domain_type" toml:"domain_type" yaml:"domain_type"`
	SourceDomainID    string      `boil:"source_domain_id" json:"source_domain_id" toml:"source_domain_id" yaml:"source_domain_id"`
	DomainName        null.String `boil:"domain_name" json:"domain_name,omitempty" toml:"domain_name" yaml:"domain_name,omitempty"`
	DomainDescription null.String `boil:"domain_description" json:"domain_description,omitempty" toml:"domain_description" yaml:"domain_description,omitempty"`

	// R holds eager-loaded relationships; L holds the Load methods. Both are
	// excluded from all serialization formats.
	R *domainR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L domainL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// DomainColumns holds the bare column names of the domains table.
var DomainColumns = struct {
	DomainID          string
	DomainType        string
	SourceDomainID    string
	DomainName        string
	DomainDescription string
}{
	DomainID:          "domain_id",
	DomainType:        "domain_type",
	SourceDomainID:    "source_domain_id",
	DomainName:        "domain_name",
	DomainDescription: "domain_description",
}

// DomainTableColumns holds the table-qualified column names.
var DomainTableColumns = struct {
	DomainID          string
	DomainType        string
	SourceDomainID    string
	DomainName        string
	DomainDescription string
}{
	DomainID:          "domains.domain_id",
	DomainType:        "domains.domain_type",
	SourceDomainID:    "domains.source_domain_id",
	DomainName:        "domains.domain_name",
	DomainDescription: "domains.domain_description",
}

// Generated where

var DomainWhere = struct {
	DomainID          whereHelperint64
	DomainType        whereHelperstring
	SourceDomainID    whereHelperstring
	DomainName        whereHelpernull_String
	DomainDescription whereHelpernull_String
}{
	DomainID:          whereHelperint64{field: "\"domains\".\"domain_id\""},
	DomainType:        whereHelperstring{field: "\"domains\".\"domain_type\""},
	SourceDomainID:    whereHelperstring{field: "\"domains\".\"source_domain_id\""},
	DomainName:        whereHelpernull_String{field: "\"domains\".\"domain_name\""},
	DomainDescription: whereHelpernull_String{field: "\"domains\".\"domain_description\""},
}

// DomainRels is where relationship names are stored.
var DomainRels = struct {
	ComponentDomains string
	SiteComponents   string
}{
	ComponentDomains: "ComponentDomains",
	SiteComponents:   "SiteComponents",
}

// domainR is where relationships are stored.
type domainR struct {
	ComponentDomains ComponentDomainSlice `boil:"ComponentDomains" json:"ComponentDomains" toml:"ComponentDomains" yaml:"ComponentDomains"`
	SiteComponents   SiteComponentSlice   `boil:"SiteComponents" json:"SiteComponents" toml:"SiteComponents" yaml:"SiteComponents"`
}

// NewStruct creates a new relationship struct
func (*domainR) NewStruct() *domainR {
	return &domainR{}
}

// GetComponentDomains returns the eager-loaded ComponentDomains, nil-safe on a nil receiver.
func (r *domainR) GetComponentDomains() ComponentDomainSlice {
	if r == nil {
		return nil
	}
	return r.ComponentDomains
}

// GetSiteComponents returns the eager-loaded SiteComponents, nil-safe on a nil receiver.
func (r *domainR) GetSiteComponents() SiteComponentSlice {
	if r == nil {
		return nil
	}
	return r.SiteComponents
}

// domainL is where Load methods for each relationship are stored.
type domainL struct{}

// Column lists used by the generated query builders.
var (
	domainAllColumns            = []string{"domain_id", "domain_type", "source_domain_id", "domain_name", "domain_description"}
	domainColumnsWithoutDefault = []string{"domain_id", "domain_type", "source_domain_id"}
	domainColumnsWithDefault    = []string{"domain_name", "domain_description"}
	domainPrimaryKeyColumns     = []string{"domain_id"}
	domainGeneratedColumns      = []string{}
)

type (
	// DomainSlice is an alias for a slice of pointers to Domain.
	// This should almost always be used instead of []Domain.
	DomainSlice []*Domain
	// DomainHook is the signature for custom Domain hook methods
	DomainHook func(context.Context, boil.ContextExecutor, *Domain) error

	domainQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	domainType                 = reflect.TypeOf(&Domain{})
	domainMapping              = queries.MakeStructMapping(domainType)
	domainPrimaryKeyMapping, _ = queries.BindMapping(domainType, domainMapping, domainPrimaryKeyColumns)
	domainInsertCacheMut       sync.RWMutex
	domainInsertCache          = make(map[string]insertCache)
	domainUpdateCacheMut       sync.RWMutex
	domainUpdateCache          = make(map[string]updateCache)
	domainUpsertCacheMut       sync.RWMutex
	domainUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Registered hook slices, one per hook point; see AddDomainHook.
var domainAfterSelectHooks []DomainHook

var domainBeforeInsertHooks []DomainHook
var domainAfterInsertHooks []DomainHook

var domainBeforeUpdateHooks []DomainHook
var domainAfterUpdateHooks []DomainHook

var domainBeforeDeleteHooks []DomainHook
var domainAfterDeleteHooks []DomainHook

var domainBeforeUpsertHooks []DomainHook
var domainAfterUpsertHooks []DomainHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *Domain) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range domainAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *Domain) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range domainBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *Domain) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range domainAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *Domain) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range domainBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *Domain) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range domainAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *Domain) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range domainBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *Domain) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range domainAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *Domain) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range domainBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *Domain) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range domainAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddDomainHook registers your hook function for all future operations.
+func AddDomainHook(hookPoint boil.HookPoint, domainHook DomainHook) { + switch hookPoint { + case boil.AfterSelectHook: + domainAfterSelectHooks = append(domainAfterSelectHooks, domainHook) + case boil.BeforeInsertHook: + domainBeforeInsertHooks = append(domainBeforeInsertHooks, domainHook) + case boil.AfterInsertHook: + domainAfterInsertHooks = append(domainAfterInsertHooks, domainHook) + case boil.BeforeUpdateHook: + domainBeforeUpdateHooks = append(domainBeforeUpdateHooks, domainHook) + case boil.AfterUpdateHook: + domainAfterUpdateHooks = append(domainAfterUpdateHooks, domainHook) + case boil.BeforeDeleteHook: + domainBeforeDeleteHooks = append(domainBeforeDeleteHooks, domainHook) + case boil.AfterDeleteHook: + domainAfterDeleteHooks = append(domainAfterDeleteHooks, domainHook) + case boil.BeforeUpsertHook: + domainBeforeUpsertHooks = append(domainBeforeUpsertHooks, domainHook) + case boil.AfterUpsertHook: + domainAfterUpsertHooks = append(domainAfterUpsertHooks, domainHook) + } +} + +// One returns a single domain record from the query. +func (q domainQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Domain, error) { + o := &Domain{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for domains") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Domain records from the query. 
+func (q domainQuery) All(ctx context.Context, exec boil.ContextExecutor) (DomainSlice, error) { + var o []*Domain + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to Domain slice") + } + + if len(domainAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Domain records in the query. +func (q domainQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count domains rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q domainQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if domains exists") + } + + return count > 0, nil +} + +// ComponentDomains retrieves all the component_domain's ComponentDomains with an executor. +func (o *Domain) ComponentDomains(mods ...qm.QueryMod) componentDomainQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"component_domains\".\"domain_id\"=?", o.DomainID), + ) + + return ComponentDomains(queryMods...) +} + +// SiteComponents retrieves all the site_component's SiteComponents with an executor. +func (o *Domain) SiteComponents(mods ...qm.QueryMod) siteComponentQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) 
+ } + + queryMods = append(queryMods, + qm.Where("\"site_components\".\"domain_id\"=?", o.DomainID), + ) + + return SiteComponents(queryMods...) +} + +// LoadComponentDomains allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (domainL) LoadComponentDomains(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDomain interface{}, mods queries.Applicator) error { + var slice []*Domain + var object *Domain + + if singular { + object = maybeDomain.(*Domain) + } else { + slice = *maybeDomain.(*[]*Domain) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &domainR{} + } + args = append(args, object.DomainID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &domainR{} + } + + for _, a := range args { + if queries.Equal(a, obj.DomainID) { + continue Outer + } + } + + args = append(args, obj.DomainID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`component_domains`), + qm.WhereIn(`component_domains.domain_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load component_domains") + } + + var resultSlice []*ComponentDomain + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice component_domains") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on component_domains") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for component_domains") + } + + if len(componentDomainAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.ComponentDomains 
= resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &componentDomainR{} + } + foreign.R.Domain = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.DomainID, foreign.DomainID) { + local.R.ComponentDomains = append(local.R.ComponentDomains, foreign) + if foreign.R == nil { + foreign.R = &componentDomainR{} + } + foreign.R.Domain = local + break + } + } + } + + return nil +} + +// LoadSiteComponents allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (domainL) LoadSiteComponents(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDomain interface{}, mods queries.Applicator) error { + var slice []*Domain + var object *Domain + + if singular { + object = maybeDomain.(*Domain) + } else { + slice = *maybeDomain.(*[]*Domain) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &domainR{} + } + args = append(args, object.DomainID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &domainR{} + } + + for _, a := range args { + if queries.Equal(a, obj.DomainID) { + continue Outer + } + } + + args = append(args, obj.DomainID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`site_components`), + qm.WhereIn(`site_components.domain_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load site_components") + } + + var resultSlice []*SiteComponent + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice site_components") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on site_components") + } + if err = results.Err(); err != nil 
{ + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for site_components") + } + + if len(siteComponentAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.SiteComponents = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &siteComponentR{} + } + foreign.R.Domain = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.DomainID, foreign.DomainID) { + local.R.SiteComponents = append(local.R.SiteComponents, foreign) + if foreign.R == nil { + foreign.R = &siteComponentR{} + } + foreign.R.Domain = local + break + } + } + } + + return nil +} + +// AddComponentDomains adds the given related objects to the existing relationships +// of the domain, optionally inserting them as new records. +// Appends related to o.R.ComponentDomains. +// Sets related.R.Domain appropriately. 
+func (o *Domain) AddComponentDomains(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ComponentDomain) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.DomainID, o.DomainID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"component_domains\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"domain_id"}), + strmangle.WhereClause("\"", "\"", 0, componentDomainPrimaryKeyColumns), + ) + values := []interface{}{o.DomainID, rel.CompdID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.DomainID, o.DomainID) + } + } + + if o.R == nil { + o.R = &domainR{ + ComponentDomains: related, + } + } else { + o.R.ComponentDomains = append(o.R.ComponentDomains, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &componentDomainR{ + Domain: o, + } + } else { + rel.R.Domain = o + } + } + return nil +} + +// SetComponentDomains removes all previously related items of the +// domain replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.Domain's ComponentDomains accordingly. +// Replaces o.R.ComponentDomains with related. +// Sets related.R.Domain's ComponentDomains accordingly. +func (o *Domain) SetComponentDomains(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ComponentDomain) error { + query := "update \"component_domains\" set \"domain_id\" = null where \"domain_id\" = ?" 
+ values := []interface{}{o.DomainID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.ComponentDomains { + queries.SetScanner(&rel.DomainID, nil) + if rel.R == nil { + continue + } + + rel.R.Domain = nil + } + o.R.ComponentDomains = nil + } + + return o.AddComponentDomains(ctx, exec, insert, related...) +} + +// RemoveComponentDomains relationships from objects passed in. +// Removes related items from R.ComponentDomains (uses pointer comparison, removal does not keep order) +// Sets related.R.Domain. +func (o *Domain) RemoveComponentDomains(ctx context.Context, exec boil.ContextExecutor, related ...*ComponentDomain) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.DomainID, nil) + if rel.R != nil { + rel.R.Domain = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("domain_id")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.ComponentDomains { + if rel != ri { + continue + } + + ln := len(o.R.ComponentDomains) + if ln > 1 && i < ln-1 { + o.R.ComponentDomains[i] = o.R.ComponentDomains[ln-1] + } + o.R.ComponentDomains = o.R.ComponentDomains[:ln-1] + break + } + } + + return nil +} + +// AddSiteComponents adds the given related objects to the existing relationships +// of the domain, optionally inserting them as new records. +// Appends related to o.R.SiteComponents. +// Sets related.R.Domain appropriately. 
+func (o *Domain) AddSiteComponents(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*SiteComponent) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.DomainID, o.DomainID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"site_components\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"domain_id"}), + strmangle.WhereClause("\"", "\"", 0, siteComponentPrimaryKeyColumns), + ) + values := []interface{}{o.DomainID, rel.SitecompID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.DomainID, o.DomainID) + } + } + + if o.R == nil { + o.R = &domainR{ + SiteComponents: related, + } + } else { + o.R.SiteComponents = append(o.R.SiteComponents, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &siteComponentR{ + Domain: o, + } + } else { + rel.R.Domain = o + } + } + return nil +} + +// SetSiteComponents removes all previously related items of the +// domain replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.Domain's SiteComponents accordingly. +// Replaces o.R.SiteComponents with related. +// Sets related.R.Domain's SiteComponents accordingly. +func (o *Domain) SetSiteComponents(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*SiteComponent) error { + query := "update \"site_components\" set \"domain_id\" = null where \"domain_id\" = ?" 
+ values := []interface{}{o.DomainID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.SiteComponents { + queries.SetScanner(&rel.DomainID, nil) + if rel.R == nil { + continue + } + + rel.R.Domain = nil + } + o.R.SiteComponents = nil + } + + return o.AddSiteComponents(ctx, exec, insert, related...) +} + +// RemoveSiteComponents relationships from objects passed in. +// Removes related items from R.SiteComponents (uses pointer comparison, removal does not keep order) +// Sets related.R.Domain. +func (o *Domain) RemoveSiteComponents(ctx context.Context, exec boil.ContextExecutor, related ...*SiteComponent) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.DomainID, nil) + if rel.R != nil { + rel.R.Domain = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("domain_id")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.SiteComponents { + if rel != ri { + continue + } + + ln := len(o.R.SiteComponents) + if ln > 1 && i < ln-1 { + o.R.SiteComponents[i] = o.R.SiteComponents[ln-1] + } + o.R.SiteComponents = o.R.SiteComponents[:ln-1] + break + } + } + + return nil +} + +// Domains retrieves all the records using an executor. +func Domains(mods ...qm.QueryMod) domainQuery { + mods = append(mods, qm.From("\"domains\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"domains\".*"}) + } + + return domainQuery{q} +} + +// FindDomain retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindDomain(ctx context.Context, exec boil.ContextExecutor, domainID int64, selectCols ...string) (*Domain, error) { + domainObj := &Domain{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"domains\" where \"domain_id\"=?", sel, + ) + + q := queries.Raw(query, domainID) + + err := q.Bind(ctx, exec, domainObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from domains") + } + + if err = domainObj.doAfterSelectHooks(ctx, exec); err != nil { + return domainObj, err + } + + return domainObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *Domain) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no domains provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(domainColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + domainInsertCacheMut.RLock() + cache, cached := domainInsertCache[key] + domainInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + domainAllColumns, + domainColumnsWithDefault, + domainColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(domainType, domainMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(domainType, domainMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"domains\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { 
+ cache.query = "INSERT INTO \"domains\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into domains") + } + + if !cached { + domainInsertCacheMut.Lock() + domainInsertCache[key] = cache + domainInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the Domain. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *Domain) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + domainUpdateCacheMut.RLock() + cache, cached := domainUpdateCache[key] + domainUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + domainAllColumns, + domainPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update domains, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"domains\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, domainPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(domainType, domainMapping, append(wl, domainPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update domains row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for domains") + } + + if !cached { + domainUpdateCacheMut.Lock() + domainUpdateCache[key] = cache + domainUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q domainQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for domains") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for domains") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o DomainSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), domainPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"domains\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, domainPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in domain slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all domain") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Domain) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no domains provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(domainColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + domainUpsertCacheMut.RLock() + cache, cached := domainUpsertCache[key] + domainUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + domainAllColumns, + domainColumnsWithDefault, + domainColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + domainAllColumns, + domainPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: 
unable to upsert domains, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(domainPrimaryKeyColumns)) + copy(conflict, domainPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"domains\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(domainType, domainMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(domainType, domainMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert domains") + } + + if !cached { + domainUpsertCacheMut.Lock() + domainUpsertCache[key] = cache + domainUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Domain record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *Domain) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no Domain provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), domainPrimaryKeyMapping) + sql := "DELETE FROM \"domains\" WHERE \"domain_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from domains") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for domains") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q domainQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no domainQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from domains") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for domains") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o DomainSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(domainBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), domainPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"domains\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, domainPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from domain slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for domains") + } + + if len(domainAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *Domain) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindDomain(ctx, exec, o.DomainID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *DomainSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := DomainSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), domainPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"domains\".* FROM \"domains\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, domainPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in DomainSlice") + } + + *o = slice + + return nil +} + +// DomainExists checks if the Domain row exists. +func DomainExists(ctx context.Context, exec boil.ContextExecutor, domainID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"domains\" where \"domain_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, domainID) + } + row := exec.QueryRowContext(ctx, sql, domainID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if domains exists") + } + + return exists, nil +} diff --git a/models/drug_indication.go b/models/drug_indication.go new file mode 100644 index 0000000..bed8f3e --- /dev/null +++ b/models/drug_indication.go @@ -0,0 +1,1489 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// DrugIndication is an object representing the database table. +type DrugIndication struct { + DrugindID int64 `boil:"drugind_id" json:"drugind_id" toml:"drugind_id" yaml:"drugind_id"` + RecordID int64 `boil:"record_id" json:"record_id" toml:"record_id" yaml:"record_id"` + Molregno null.Int64 `boil:"molregno" json:"molregno,omitempty" toml:"molregno" yaml:"molregno,omitempty"` + MaxPhaseForInd null.Int16 `boil:"max_phase_for_ind" json:"max_phase_for_ind,omitempty" toml:"max_phase_for_ind" yaml:"max_phase_for_ind,omitempty"` + MeshID string `boil:"mesh_id" json:"mesh_id" toml:"mesh_id" yaml:"mesh_id"` + MeshHeading string `boil:"mesh_heading" json:"mesh_heading" toml:"mesh_heading" yaml:"mesh_heading"` + EfoID null.String `boil:"efo_id" json:"efo_id,omitempty" toml:"efo_id" yaml:"efo_id,omitempty"` + EfoTerm null.String `boil:"efo_term" json:"efo_term,omitempty" toml:"efo_term" yaml:"efo_term,omitempty"` + + R *drugIndicationR `boil:"-" json:"-" toml:"-" yaml:"-"` + L drugIndicationL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var DrugIndicationColumns = struct { + DrugindID string + RecordID string + Molregno string + MaxPhaseForInd string + MeshID string + MeshHeading string + EfoID string + EfoTerm string +}{ + DrugindID: "drugind_id", + RecordID: "record_id", + Molregno: "molregno", + MaxPhaseForInd: "max_phase_for_ind", + MeshID: "mesh_id", + MeshHeading: "mesh_heading", + EfoID: "efo_id", + EfoTerm: "efo_term", +} + +var DrugIndicationTableColumns = struct { + DrugindID string + RecordID string + Molregno string + 
MaxPhaseForInd string + MeshID string + MeshHeading string + EfoID string + EfoTerm string +}{ + DrugindID: "drug_indication.drugind_id", + RecordID: "drug_indication.record_id", + Molregno: "drug_indication.molregno", + MaxPhaseForInd: "drug_indication.max_phase_for_ind", + MeshID: "drug_indication.mesh_id", + MeshHeading: "drug_indication.mesh_heading", + EfoID: "drug_indication.efo_id", + EfoTerm: "drug_indication.efo_term", +} + +// Generated where + +var DrugIndicationWhere = struct { + DrugindID whereHelperint64 + RecordID whereHelperint64 + Molregno whereHelpernull_Int64 + MaxPhaseForInd whereHelpernull_Int16 + MeshID whereHelperstring + MeshHeading whereHelperstring + EfoID whereHelpernull_String + EfoTerm whereHelpernull_String +}{ + DrugindID: whereHelperint64{field: "\"drug_indication\".\"drugind_id\""}, + RecordID: whereHelperint64{field: "\"drug_indication\".\"record_id\""}, + Molregno: whereHelpernull_Int64{field: "\"drug_indication\".\"molregno\""}, + MaxPhaseForInd: whereHelpernull_Int16{field: "\"drug_indication\".\"max_phase_for_ind\""}, + MeshID: whereHelperstring{field: "\"drug_indication\".\"mesh_id\""}, + MeshHeading: whereHelperstring{field: "\"drug_indication\".\"mesh_heading\""}, + EfoID: whereHelpernull_String{field: "\"drug_indication\".\"efo_id\""}, + EfoTerm: whereHelpernull_String{field: "\"drug_indication\".\"efo_term\""}, +} + +// DrugIndicationRels is where relationship names are stored. +var DrugIndicationRels = struct { + Record string + MolregnoMoleculeDictionary string + DrugindIndicationRefs string +}{ + Record: "Record", + MolregnoMoleculeDictionary: "MolregnoMoleculeDictionary", + DrugindIndicationRefs: "DrugindIndicationRefs", +} + +// drugIndicationR is where relationships are stored. 
+type drugIndicationR struct { + Record *CompoundRecord `boil:"Record" json:"Record" toml:"Record" yaml:"Record"` + MolregnoMoleculeDictionary *MoleculeDictionary `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"` + DrugindIndicationRefs IndicationRefSlice `boil:"DrugindIndicationRefs" json:"DrugindIndicationRefs" toml:"DrugindIndicationRefs" yaml:"DrugindIndicationRefs"` +} + +// NewStruct creates a new relationship struct +func (*drugIndicationR) NewStruct() *drugIndicationR { + return &drugIndicationR{} +} + +func (r *drugIndicationR) GetRecord() *CompoundRecord { + if r == nil { + return nil + } + return r.Record +} + +func (r *drugIndicationR) GetMolregnoMoleculeDictionary() *MoleculeDictionary { + if r == nil { + return nil + } + return r.MolregnoMoleculeDictionary +} + +func (r *drugIndicationR) GetDrugindIndicationRefs() IndicationRefSlice { + if r == nil { + return nil + } + return r.DrugindIndicationRefs +} + +// drugIndicationL is where Load methods for each relationship are stored. +type drugIndicationL struct{} + +var ( + drugIndicationAllColumns = []string{"drugind_id", "record_id", "molregno", "max_phase_for_ind", "mesh_id", "mesh_heading", "efo_id", "efo_term"} + drugIndicationColumnsWithoutDefault = []string{"drugind_id", "record_id", "mesh_id", "mesh_heading"} + drugIndicationColumnsWithDefault = []string{"molregno", "max_phase_for_ind", "efo_id", "efo_term"} + drugIndicationPrimaryKeyColumns = []string{"drugind_id"} + drugIndicationGeneratedColumns = []string{} +) + +type ( + // DrugIndicationSlice is an alias for a slice of pointers to DrugIndication. + // This should almost always be used instead of []DrugIndication. 
+ DrugIndicationSlice []*DrugIndication + // DrugIndicationHook is the signature for custom DrugIndication hook methods + DrugIndicationHook func(context.Context, boil.ContextExecutor, *DrugIndication) error + + drugIndicationQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + drugIndicationType = reflect.TypeOf(&DrugIndication{}) + drugIndicationMapping = queries.MakeStructMapping(drugIndicationType) + drugIndicationPrimaryKeyMapping, _ = queries.BindMapping(drugIndicationType, drugIndicationMapping, drugIndicationPrimaryKeyColumns) + drugIndicationInsertCacheMut sync.RWMutex + drugIndicationInsertCache = make(map[string]insertCache) + drugIndicationUpdateCacheMut sync.RWMutex + drugIndicationUpdateCache = make(map[string]updateCache) + drugIndicationUpsertCacheMut sync.RWMutex + drugIndicationUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var drugIndicationAfterSelectHooks []DrugIndicationHook + +var drugIndicationBeforeInsertHooks []DrugIndicationHook +var drugIndicationAfterInsertHooks []DrugIndicationHook + +var drugIndicationBeforeUpdateHooks []DrugIndicationHook +var drugIndicationAfterUpdateHooks []DrugIndicationHook + +var drugIndicationBeforeDeleteHooks []DrugIndicationHook +var drugIndicationAfterDeleteHooks []DrugIndicationHook + +var drugIndicationBeforeUpsertHooks []DrugIndicationHook +var drugIndicationAfterUpsertHooks []DrugIndicationHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *DrugIndication) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugIndicationAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *DrugIndication) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugIndicationBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *DrugIndication) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugIndicationAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *DrugIndication) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugIndicationBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *DrugIndication) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugIndicationAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *DrugIndication) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugIndicationBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *DrugIndication) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugIndicationAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *DrugIndication) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugIndicationBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *DrugIndication) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugIndicationAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddDrugIndicationHook registers your hook function for all future operations. 
+func AddDrugIndicationHook(hookPoint boil.HookPoint, drugIndicationHook DrugIndicationHook) { + switch hookPoint { + case boil.AfterSelectHook: + drugIndicationAfterSelectHooks = append(drugIndicationAfterSelectHooks, drugIndicationHook) + case boil.BeforeInsertHook: + drugIndicationBeforeInsertHooks = append(drugIndicationBeforeInsertHooks, drugIndicationHook) + case boil.AfterInsertHook: + drugIndicationAfterInsertHooks = append(drugIndicationAfterInsertHooks, drugIndicationHook) + case boil.BeforeUpdateHook: + drugIndicationBeforeUpdateHooks = append(drugIndicationBeforeUpdateHooks, drugIndicationHook) + case boil.AfterUpdateHook: + drugIndicationAfterUpdateHooks = append(drugIndicationAfterUpdateHooks, drugIndicationHook) + case boil.BeforeDeleteHook: + drugIndicationBeforeDeleteHooks = append(drugIndicationBeforeDeleteHooks, drugIndicationHook) + case boil.AfterDeleteHook: + drugIndicationAfterDeleteHooks = append(drugIndicationAfterDeleteHooks, drugIndicationHook) + case boil.BeforeUpsertHook: + drugIndicationBeforeUpsertHooks = append(drugIndicationBeforeUpsertHooks, drugIndicationHook) + case boil.AfterUpsertHook: + drugIndicationAfterUpsertHooks = append(drugIndicationAfterUpsertHooks, drugIndicationHook) + } +} + +// One returns a single drugIndication record from the query. +func (q drugIndicationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*DrugIndication, error) { + o := &DrugIndication{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for drug_indication") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all DrugIndication records from the query. 
+func (q drugIndicationQuery) All(ctx context.Context, exec boil.ContextExecutor) (DrugIndicationSlice, error) { + var o []*DrugIndication + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to DrugIndication slice") + } + + if len(drugIndicationAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all DrugIndication records in the query. +func (q drugIndicationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count drug_indication rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q drugIndicationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if drug_indication exists") + } + + return count > 0, nil +} + +// Record pointed to by the foreign key. +func (o *DrugIndication) Record(mods ...qm.QueryMod) compoundRecordQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"record_id\" = ?", o.RecordID), + } + + queryMods = append(queryMods, mods...) + + return CompoundRecords(queryMods...) +} + +// MolregnoMoleculeDictionary pointed to by the foreign key. +func (o *DrugIndication) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"molregno\" = ?", o.Molregno), + } + + queryMods = append(queryMods, mods...) + + return MoleculeDictionaries(queryMods...) 
+} + +// DrugindIndicationRefs retrieves all the indication_ref's IndicationRefs with an executor via drugind_id column. +func (o *DrugIndication) DrugindIndicationRefs(mods ...qm.QueryMod) indicationRefQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"indication_refs\".\"drugind_id\"=?", o.DrugindID), + ) + + return IndicationRefs(queryMods...) +} + +// LoadRecord allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (drugIndicationL) LoadRecord(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDrugIndication interface{}, mods queries.Applicator) error { + var slice []*DrugIndication + var object *DrugIndication + + if singular { + object = maybeDrugIndication.(*DrugIndication) + } else { + slice = *maybeDrugIndication.(*[]*DrugIndication) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &drugIndicationR{} + } + args = append(args, object.RecordID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &drugIndicationR{} + } + + for _, a := range args { + if a == obj.RecordID { + continue Outer + } + } + + args = append(args, obj.RecordID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`compound_records`), + qm.WhereIn(`compound_records.record_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load CompoundRecord") + } + + var resultSlice []*CompoundRecord + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice CompoundRecord") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for compound_records") + } + if err = 
results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound_records") + } + + if len(drugIndicationAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Record = foreign + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.RecordDrugIndications = append(foreign.R.RecordDrugIndications, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.RecordID == foreign.RecordID { + local.R.Record = foreign + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.RecordDrugIndications = append(foreign.R.RecordDrugIndications, local) + break + } + } + } + + return nil +} + +// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (drugIndicationL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDrugIndication interface{}, mods queries.Applicator) error { + var slice []*DrugIndication + var object *DrugIndication + + if singular { + object = maybeDrugIndication.(*DrugIndication) + } else { + slice = *maybeDrugIndication.(*[]*DrugIndication) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &drugIndicationR{} + } + if !queries.IsNil(object.Molregno) { + args = append(args, object.Molregno) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &drugIndicationR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Molregno) { + continue Outer + } + } + + if !queries.IsNil(obj.Molregno) { + args = append(args, obj.Molregno) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`molecule_dictionary`), + qm.WhereIn(`molecule_dictionary.molregno in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load MoleculeDictionary") + } + + var resultSlice []*MoleculeDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary") + } + + if len(drugIndicationAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.MolregnoMoleculeDictionary = foreign + if 
foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.MolregnoDrugIndications = append(foreign.R.MolregnoDrugIndications, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.Molregno, foreign.Molregno) { + local.R.MolregnoMoleculeDictionary = foreign + if foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.MolregnoDrugIndications = append(foreign.R.MolregnoDrugIndications, local) + break + } + } + } + + return nil +} + +// LoadDrugindIndicationRefs allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (drugIndicationL) LoadDrugindIndicationRefs(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDrugIndication interface{}, mods queries.Applicator) error { + var slice []*DrugIndication + var object *DrugIndication + + if singular { + object = maybeDrugIndication.(*DrugIndication) + } else { + slice = *maybeDrugIndication.(*[]*DrugIndication) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &drugIndicationR{} + } + args = append(args, object.DrugindID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &drugIndicationR{} + } + + for _, a := range args { + if a == obj.DrugindID { + continue Outer + } + } + + args = append(args, obj.DrugindID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`indication_refs`), + qm.WhereIn(`indication_refs.drugind_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load indication_refs") + } + + var resultSlice []*IndicationRef + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice indication_refs") + } + + if err = results.Close(); err != nil { + 
return errors.Wrap(err, "failed to close results in eager load on indication_refs") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for indication_refs") + } + + if len(indicationRefAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.DrugindIndicationRefs = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &indicationRefR{} + } + foreign.R.Drugind = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.DrugindID == foreign.DrugindID { + local.R.DrugindIndicationRefs = append(local.R.DrugindIndicationRefs, foreign) + if foreign.R == nil { + foreign.R = &indicationRefR{} + } + foreign.R.Drugind = local + break + } + } + } + + return nil +} + +// SetRecord of the drugIndication to the related item. +// Sets o.R.Record to related. +// Adds o to related.R.RecordDrugIndications. 
+func (o *DrugIndication) SetRecord(ctx context.Context, exec boil.ContextExecutor, insert bool, related *CompoundRecord) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"drug_indication\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"record_id"}), + strmangle.WhereClause("\"", "\"", 0, drugIndicationPrimaryKeyColumns), + ) + values := []interface{}{related.RecordID, o.DrugindID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.RecordID = related.RecordID + if o.R == nil { + o.R = &drugIndicationR{ + Record: related, + } + } else { + o.R.Record = related + } + + if related.R == nil { + related.R = &compoundRecordR{ + RecordDrugIndications: DrugIndicationSlice{o}, + } + } else { + related.R.RecordDrugIndications = append(related.R.RecordDrugIndications, o) + } + + return nil +} + +// SetMolregnoMoleculeDictionary of the drugIndication to the related item. +// Sets o.R.MolregnoMoleculeDictionary to related. +// Adds o to related.R.MolregnoDrugIndications. 
+func (o *DrugIndication) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"drug_indication\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, drugIndicationPrimaryKeyColumns), + ) + values := []interface{}{related.Molregno, o.DrugindID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.Molregno, related.Molregno) + if o.R == nil { + o.R = &drugIndicationR{ + MolregnoMoleculeDictionary: related, + } + } else { + o.R.MolregnoMoleculeDictionary = related + } + + if related.R == nil { + related.R = &moleculeDictionaryR{ + MolregnoDrugIndications: DrugIndicationSlice{o}, + } + } else { + related.R.MolregnoDrugIndications = append(related.R.MolregnoDrugIndications, o) + } + + return nil +} + +// RemoveMolregnoMoleculeDictionary relationship. +// Sets o.R.MolregnoMoleculeDictionary to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *DrugIndication) RemoveMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, related *MoleculeDictionary) error { + var err error + + queries.SetScanner(&o.Molregno, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("molregno")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.MolregnoMoleculeDictionary = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.MolregnoDrugIndications { + if queries.Equal(o.Molregno, ri.Molregno) { + continue + } + + ln := len(related.R.MolregnoDrugIndications) + if ln > 1 && i < ln-1 { + related.R.MolregnoDrugIndications[i] = related.R.MolregnoDrugIndications[ln-1] + } + related.R.MolregnoDrugIndications = related.R.MolregnoDrugIndications[:ln-1] + break + } + return nil +} + +// AddDrugindIndicationRefs adds the given related objects to the existing relationships +// of the drug_indication, optionally inserting them as new records. +// Appends related to o.R.DrugindIndicationRefs. +// Sets related.R.Drugind appropriately. 
+func (o *DrugIndication) AddDrugindIndicationRefs(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*IndicationRef) error { + var err error + for _, rel := range related { + if insert { + rel.DrugindID = o.DrugindID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"indication_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"drugind_id"}), + strmangle.WhereClause("\"", "\"", 0, indicationRefPrimaryKeyColumns), + ) + values := []interface{}{o.DrugindID, rel.IndrefID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.DrugindID = o.DrugindID + } + } + + if o.R == nil { + o.R = &drugIndicationR{ + DrugindIndicationRefs: related, + } + } else { + o.R.DrugindIndicationRefs = append(o.R.DrugindIndicationRefs, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &indicationRefR{ + Drugind: o, + } + } else { + rel.R.Drugind = o + } + } + return nil +} + +// DrugIndications retrieves all the records using an executor. +func DrugIndications(mods ...qm.QueryMod) drugIndicationQuery { + mods = append(mods, qm.From("\"drug_indication\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"drug_indication\".*"}) + } + + return drugIndicationQuery{q} +} + +// FindDrugIndication retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindDrugIndication(ctx context.Context, exec boil.ContextExecutor, drugindID int64, selectCols ...string) (*DrugIndication, error) { + drugIndicationObj := &DrugIndication{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"drug_indication\" where \"drugind_id\"=?", sel, + ) + + q := queries.Raw(query, drugindID) + + err := q.Bind(ctx, exec, drugIndicationObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from drug_indication") + } + + if err = drugIndicationObj.doAfterSelectHooks(ctx, exec); err != nil { + return drugIndicationObj, err + } + + return drugIndicationObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *DrugIndication) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no drug_indication provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(drugIndicationColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + drugIndicationInsertCacheMut.RLock() + cache, cached := drugIndicationInsertCache[key] + drugIndicationInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + drugIndicationAllColumns, + drugIndicationColumnsWithDefault, + drugIndicationColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(drugIndicationType, drugIndicationMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(drugIndicationType, drugIndicationMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + 
cache.query = fmt.Sprintf("INSERT INTO \"drug_indication\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"drug_indication\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into drug_indication") + } + + if !cached { + drugIndicationInsertCacheMut.Lock() + drugIndicationInsertCache[key] = cache + drugIndicationInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the DrugIndication. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *DrugIndication) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + drugIndicationUpdateCacheMut.RLock() + cache, cached := drugIndicationUpdateCache[key] + drugIndicationUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + drugIndicationAllColumns, + drugIndicationPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update drug_indication, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"drug_indication\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, drugIndicationPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(drugIndicationType, drugIndicationMapping, append(wl, drugIndicationPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update drug_indication row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for drug_indication") + } + + if !cached { + drugIndicationUpdateCacheMut.Lock() + drugIndicationUpdateCache[key] = cache + drugIndicationUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
func (q drugIndicationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for drug_indication")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for drug_indication")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o DrugIndicationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	// Flatten the column map into parallel name/value slices for SetParamNames.
	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), drugIndicationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"drug_indication\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, drugIndicationPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in drugIndication slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all drugIndication")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *DrugIndication) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no drug_indication provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(drugIndicationColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	// The key encodes every input that affects the generated SQL:
	// conflict behaviour, conflict/update/insert column sets and non-zero defaults.
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	drugIndicationUpsertCacheMut.RLock()
	cache, cached := drugIndicationUpsertCache[key]
	drugIndicationUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			drugIndicationAllColumns,
			drugIndicationColumnsWithDefault,
			drugIndicationColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			drugIndicationAllColumns,
			drugIndicationPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert drug_indication, could not build update column list")
		}

		// Default the conflict target to the primary key when none was given.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(drugIndicationPrimaryKeyColumns))
			copy(conflict, drugIndicationPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"drug_indication\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(drugIndicationType, drugIndicationMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(drugIndicationType, drugIndicationMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		// RETURNING columns are scanned back into the struct.
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert drug_indication")
	}

	if !cached {
		drugIndicationUpsertCacheMut.Lock()
		drugIndicationUpsertCache[key] = cache
		drugIndicationUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single DrugIndication record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *DrugIndication) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no DrugIndication provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	// The WHERE clause binds the primary key value(s) of this row.
	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), drugIndicationPrimaryKeyMapping)
	sql := "DELETE FROM \"drug_indication\" WHERE \"drugind_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from drug_indication")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for drug_indication")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q drugIndicationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no drugIndicationQuery provided for delete all")
	}

	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from drug_indication")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for drug_indication")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o DrugIndicationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Hooks run per object, but the delete itself is a single batched statement.
	if len(drugIndicationBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), drugIndicationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"drug_indication\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, drugIndicationPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from drugIndication slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for drug_indication")
	}

	if len(drugIndicationAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *DrugIndication) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindDrugIndication(ctx, exec, o.DrugindID)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place with the freshly fetched row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *DrugIndicationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := DrugIndicationSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), drugIndicationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// One SELECT matching every primary key in the slice.
	sql := "SELECT \"drug_indication\".* FROM \"drug_indication\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, drugIndicationPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in DrugIndicationSlice")
	}

	*o = slice

	return nil
}

// DrugIndicationExists checks if the DrugIndication row exists.
func DrugIndicationExists(ctx context.Context, exec boil.ContextExecutor, drugindID int64) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"drug_indication\" where \"drugind_id\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, drugindID)
	}
	row := exec.QueryRowContext(ctx, sql, drugindID)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if drug_indication exists")
	}

	return exists, nil
}
diff --git a/models/drug_mechanism.go b/models/drug_mechanism.go
new file mode 100644
index 0000000..b360831
--- /dev/null
+++ b/models/drug_mechanism.go
@@ -0,0 +1,2367 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// DrugMechanism is an object representing the database table. +type DrugMechanism struct { + MecID int64 `boil:"mec_id" json:"mec_id" toml:"mec_id" yaml:"mec_id"` + RecordID int64 `boil:"record_id" json:"record_id" toml:"record_id" yaml:"record_id"` + Molregno null.Int64 `boil:"molregno" json:"molregno,omitempty" toml:"molregno" yaml:"molregno,omitempty"` + MechanismOfAction null.String `boil:"mechanism_of_action" json:"mechanism_of_action,omitempty" toml:"mechanism_of_action" yaml:"mechanism_of_action,omitempty"` + Tid null.Int64 `boil:"tid" json:"tid,omitempty" toml:"tid" yaml:"tid,omitempty"` + SiteID null.Int64 `boil:"site_id" json:"site_id,omitempty" toml:"site_id" yaml:"site_id,omitempty"` + ActionType null.String `boil:"action_type" json:"action_type,omitempty" toml:"action_type" yaml:"action_type,omitempty"` + DirectInteraction null.Int16 `boil:"direct_interaction" json:"direct_interaction,omitempty" toml:"direct_interaction" yaml:"direct_interaction,omitempty"` + MolecularMechanism null.Int16 `boil:"molecular_mechanism" json:"molecular_mechanism,omitempty" toml:"molecular_mechanism" yaml:"molecular_mechanism,omitempty"` + DiseaseEfficacy null.Int16 `boil:"disease_efficacy" json:"disease_efficacy,omitempty" toml:"disease_efficacy" yaml:"disease_efficacy,omitempty"` + MechanismComment null.String `boil:"mechanism_comment" json:"mechanism_comment,omitempty" toml:"mechanism_comment" yaml:"mechanism_comment,omitempty"` + SelectivityComment null.String `boil:"selectivity_comment" json:"selectivity_comment,omitempty" 
toml:"selectivity_comment" yaml:"selectivity_comment,omitempty"` + BindingSiteComment null.String `boil:"binding_site_comment" json:"binding_site_comment,omitempty" toml:"binding_site_comment" yaml:"binding_site_comment,omitempty"` + VariantID null.Int64 `boil:"variant_id" json:"variant_id,omitempty" toml:"variant_id" yaml:"variant_id,omitempty"` + + R *drugMechanismR `boil:"-" json:"-" toml:"-" yaml:"-"` + L drugMechanismL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var DrugMechanismColumns = struct { + MecID string + RecordID string + Molregno string + MechanismOfAction string + Tid string + SiteID string + ActionType string + DirectInteraction string + MolecularMechanism string + DiseaseEfficacy string + MechanismComment string + SelectivityComment string + BindingSiteComment string + VariantID string +}{ + MecID: "mec_id", + RecordID: "record_id", + Molregno: "molregno", + MechanismOfAction: "mechanism_of_action", + Tid: "tid", + SiteID: "site_id", + ActionType: "action_type", + DirectInteraction: "direct_interaction", + MolecularMechanism: "molecular_mechanism", + DiseaseEfficacy: "disease_efficacy", + MechanismComment: "mechanism_comment", + SelectivityComment: "selectivity_comment", + BindingSiteComment: "binding_site_comment", + VariantID: "variant_id", +} + +var DrugMechanismTableColumns = struct { + MecID string + RecordID string + Molregno string + MechanismOfAction string + Tid string + SiteID string + ActionType string + DirectInteraction string + MolecularMechanism string + DiseaseEfficacy string + MechanismComment string + SelectivityComment string + BindingSiteComment string + VariantID string +}{ + MecID: "drug_mechanism.mec_id", + RecordID: "drug_mechanism.record_id", + Molregno: "drug_mechanism.molregno", + MechanismOfAction: "drug_mechanism.mechanism_of_action", + Tid: "drug_mechanism.tid", + SiteID: "drug_mechanism.site_id", + ActionType: "drug_mechanism.action_type", + DirectInteraction: "drug_mechanism.direct_interaction", + 
MolecularMechanism: "drug_mechanism.molecular_mechanism", + DiseaseEfficacy: "drug_mechanism.disease_efficacy", + MechanismComment: "drug_mechanism.mechanism_comment", + SelectivityComment: "drug_mechanism.selectivity_comment", + BindingSiteComment: "drug_mechanism.binding_site_comment", + VariantID: "drug_mechanism.variant_id", +} + +// Generated where + +var DrugMechanismWhere = struct { + MecID whereHelperint64 + RecordID whereHelperint64 + Molregno whereHelpernull_Int64 + MechanismOfAction whereHelpernull_String + Tid whereHelpernull_Int64 + SiteID whereHelpernull_Int64 + ActionType whereHelpernull_String + DirectInteraction whereHelpernull_Int16 + MolecularMechanism whereHelpernull_Int16 + DiseaseEfficacy whereHelpernull_Int16 + MechanismComment whereHelpernull_String + SelectivityComment whereHelpernull_String + BindingSiteComment whereHelpernull_String + VariantID whereHelpernull_Int64 +}{ + MecID: whereHelperint64{field: "\"drug_mechanism\".\"mec_id\""}, + RecordID: whereHelperint64{field: "\"drug_mechanism\".\"record_id\""}, + Molregno: whereHelpernull_Int64{field: "\"drug_mechanism\".\"molregno\""}, + MechanismOfAction: whereHelpernull_String{field: "\"drug_mechanism\".\"mechanism_of_action\""}, + Tid: whereHelpernull_Int64{field: "\"drug_mechanism\".\"tid\""}, + SiteID: whereHelpernull_Int64{field: "\"drug_mechanism\".\"site_id\""}, + ActionType: whereHelpernull_String{field: "\"drug_mechanism\".\"action_type\""}, + DirectInteraction: whereHelpernull_Int16{field: "\"drug_mechanism\".\"direct_interaction\""}, + MolecularMechanism: whereHelpernull_Int16{field: "\"drug_mechanism\".\"molecular_mechanism\""}, + DiseaseEfficacy: whereHelpernull_Int16{field: "\"drug_mechanism\".\"disease_efficacy\""}, + MechanismComment: whereHelpernull_String{field: "\"drug_mechanism\".\"mechanism_comment\""}, + SelectivityComment: whereHelpernull_String{field: "\"drug_mechanism\".\"selectivity_comment\""}, + BindingSiteComment: whereHelpernull_String{field: 
"\"drug_mechanism\".\"binding_site_comment\""}, + VariantID: whereHelpernull_Int64{field: "\"drug_mechanism\".\"variant_id\""}, +} + +// DrugMechanismRels is where relationship names are stored. +var DrugMechanismRels = struct { + Variant string + TidTargetDictionary string + Site string + Record string + MolregnoMoleculeDictionary string + DrugMechanismActionType string + MecMechanismRefs string +}{ + Variant: "Variant", + TidTargetDictionary: "TidTargetDictionary", + Site: "Site", + Record: "Record", + MolregnoMoleculeDictionary: "MolregnoMoleculeDictionary", + DrugMechanismActionType: "DrugMechanismActionType", + MecMechanismRefs: "MecMechanismRefs", +} + +// drugMechanismR is where relationships are stored. +type drugMechanismR struct { + Variant *VariantSequence `boil:"Variant" json:"Variant" toml:"Variant" yaml:"Variant"` + TidTargetDictionary *TargetDictionary `boil:"TidTargetDictionary" json:"TidTargetDictionary" toml:"TidTargetDictionary" yaml:"TidTargetDictionary"` + Site *BindingSite `boil:"Site" json:"Site" toml:"Site" yaml:"Site"` + Record *CompoundRecord `boil:"Record" json:"Record" toml:"Record" yaml:"Record"` + MolregnoMoleculeDictionary *MoleculeDictionary `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"` + DrugMechanismActionType *ActionType `boil:"DrugMechanismActionType" json:"DrugMechanismActionType" toml:"DrugMechanismActionType" yaml:"DrugMechanismActionType"` + MecMechanismRefs MechanismRefSlice `boil:"MecMechanismRefs" json:"MecMechanismRefs" toml:"MecMechanismRefs" yaml:"MecMechanismRefs"` +} + +// NewStruct creates a new relationship struct +func (*drugMechanismR) NewStruct() *drugMechanismR { + return &drugMechanismR{} +} + +func (r *drugMechanismR) GetVariant() *VariantSequence { + if r == nil { + return nil + } + return r.Variant +} + +func (r *drugMechanismR) GetTidTargetDictionary() *TargetDictionary { + if r == nil { + return nil + } + return 
r.TidTargetDictionary +} + +func (r *drugMechanismR) GetSite() *BindingSite { + if r == nil { + return nil + } + return r.Site +} + +func (r *drugMechanismR) GetRecord() *CompoundRecord { + if r == nil { + return nil + } + return r.Record +} + +func (r *drugMechanismR) GetMolregnoMoleculeDictionary() *MoleculeDictionary { + if r == nil { + return nil + } + return r.MolregnoMoleculeDictionary +} + +func (r *drugMechanismR) GetDrugMechanismActionType() *ActionType { + if r == nil { + return nil + } + return r.DrugMechanismActionType +} + +func (r *drugMechanismR) GetMecMechanismRefs() MechanismRefSlice { + if r == nil { + return nil + } + return r.MecMechanismRefs +} + +// drugMechanismL is where Load methods for each relationship are stored. +type drugMechanismL struct{} + +var ( + drugMechanismAllColumns = []string{"mec_id", "record_id", "molregno", "mechanism_of_action", "tid", "site_id", "action_type", "direct_interaction", "molecular_mechanism", "disease_efficacy", "mechanism_comment", "selectivity_comment", "binding_site_comment", "variant_id"} + drugMechanismColumnsWithoutDefault = []string{"mec_id", "record_id"} + drugMechanismColumnsWithDefault = []string{"molregno", "mechanism_of_action", "tid", "site_id", "action_type", "direct_interaction", "molecular_mechanism", "disease_efficacy", "mechanism_comment", "selectivity_comment", "binding_site_comment", "variant_id"} + drugMechanismPrimaryKeyColumns = []string{"mec_id"} + drugMechanismGeneratedColumns = []string{} +) + +type ( + // DrugMechanismSlice is an alias for a slice of pointers to DrugMechanism. + // This should almost always be used instead of []DrugMechanism. 
+ DrugMechanismSlice []*DrugMechanism + // DrugMechanismHook is the signature for custom DrugMechanism hook methods + DrugMechanismHook func(context.Context, boil.ContextExecutor, *DrugMechanism) error + + drugMechanismQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + drugMechanismType = reflect.TypeOf(&DrugMechanism{}) + drugMechanismMapping = queries.MakeStructMapping(drugMechanismType) + drugMechanismPrimaryKeyMapping, _ = queries.BindMapping(drugMechanismType, drugMechanismMapping, drugMechanismPrimaryKeyColumns) + drugMechanismInsertCacheMut sync.RWMutex + drugMechanismInsertCache = make(map[string]insertCache) + drugMechanismUpdateCacheMut sync.RWMutex + drugMechanismUpdateCache = make(map[string]updateCache) + drugMechanismUpsertCacheMut sync.RWMutex + drugMechanismUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var drugMechanismAfterSelectHooks []DrugMechanismHook + +var drugMechanismBeforeInsertHooks []DrugMechanismHook +var drugMechanismAfterInsertHooks []DrugMechanismHook + +var drugMechanismBeforeUpdateHooks []DrugMechanismHook +var drugMechanismAfterUpdateHooks []DrugMechanismHook + +var drugMechanismBeforeDeleteHooks []DrugMechanismHook +var drugMechanismAfterDeleteHooks []DrugMechanismHook + +var drugMechanismBeforeUpsertHooks []DrugMechanismHook +var drugMechanismAfterUpsertHooks []DrugMechanismHook + +// doAfterSelectHooks executes all "after Select" hooks. 
// doAfterSelectHooks executes all "after Select" hooks.
// All hook runners short-circuit when hooks are skipped via the context.
func (o *DrugMechanism) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range drugMechanismAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *DrugMechanism) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range drugMechanismBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *DrugMechanism) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range drugMechanismAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *DrugMechanism) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range drugMechanismBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *DrugMechanism) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range drugMechanismAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *DrugMechanism) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range drugMechanismBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *DrugMechanism) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range drugMechanismAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *DrugMechanism) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range drugMechanismBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *DrugMechanism) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range drugMechanismAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddDrugMechanismHook registers your hook function for all future operations.
// Note: registration appends to package-level slices and is not synchronized;
// hooks are expected to be registered once at startup.
func AddDrugMechanismHook(hookPoint boil.HookPoint, drugMechanismHook DrugMechanismHook) {
	switch hookPoint {
	case boil.AfterSelectHook:
		drugMechanismAfterSelectHooks = append(drugMechanismAfterSelectHooks, drugMechanismHook)
	case boil.BeforeInsertHook:
		drugMechanismBeforeInsertHooks = append(drugMechanismBeforeInsertHooks, drugMechanismHook)
	case boil.AfterInsertHook:
		drugMechanismAfterInsertHooks = append(drugMechanismAfterInsertHooks, drugMechanismHook)
	case boil.BeforeUpdateHook:
		drugMechanismBeforeUpdateHooks = append(drugMechanismBeforeUpdateHooks, drugMechanismHook)
	case boil.AfterUpdateHook:
		drugMechanismAfterUpdateHooks = append(drugMechanismAfterUpdateHooks, drugMechanismHook)
	case boil.BeforeDeleteHook:
		drugMechanismBeforeDeleteHooks = append(drugMechanismBeforeDeleteHooks, drugMechanismHook)
	case boil.AfterDeleteHook:
		drugMechanismAfterDeleteHooks = append(drugMechanismAfterDeleteHooks, drugMechanismHook)
	case boil.BeforeUpsertHook:
		drugMechanismBeforeUpsertHooks = append(drugMechanismBeforeUpsertHooks, drugMechanismHook)
	case boil.AfterUpsertHook:
		drugMechanismAfterUpsertHooks = append(drugMechanismAfterUpsertHooks, drugMechanismHook)
	}
}

// One returns a single drugMechanism record from the query.
func (q drugMechanismQuery) One(ctx context.Context, exec boil.ContextExecutor) (*DrugMechanism, error) {
	o := &DrugMechanism{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		// sql.ErrNoRows is surfaced unwrapped so callers can test for it directly.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for drug_mechanism")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all DrugMechanism records from the query.
func (q drugMechanismQuery) All(ctx context.Context, exec boil.ContextExecutor) (DrugMechanismSlice, error) {
	var o []*DrugMechanism

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to DrugMechanism slice")
	}

	if len(drugMechanismAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all DrugMechanism records in the query.
func (q drugMechanismQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Replace the select list with COUNT(*) before executing.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count drug_mechanism rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q drugMechanismQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if drug_mechanism exists")
	}

	return count > 0, nil
}

// Variant pointed to by the foreign key.
func (o *DrugMechanism) Variant(mods ...qm.QueryMod) variantSequenceQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"variant_id\" = ?", o.VariantID),
	}

	queryMods = append(queryMods, mods...)

	return VariantSequences(queryMods...)
}

// TidTargetDictionary pointed to by the foreign key.
func (o *DrugMechanism) TidTargetDictionary(mods ...qm.QueryMod) targetDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"tid\" = ?", o.Tid),
	}

	queryMods = append(queryMods, mods...)

	return TargetDictionaries(queryMods...)
}

// Site pointed to by the foreign key.
func (o *DrugMechanism) Site(mods ...qm.QueryMod) bindingSiteQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"site_id\" = ?", o.SiteID),
	}

	queryMods = append(queryMods, mods...)

	return BindingSites(queryMods...)
}

// Record pointed to by the foreign key.
func (o *DrugMechanism) Record(mods ...qm.QueryMod) compoundRecordQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"record_id\" = ?", o.RecordID),
	}

	queryMods = append(queryMods, mods...)

	return CompoundRecords(queryMods...)
}

// MolregnoMoleculeDictionary pointed to by the foreign key.
func (o *DrugMechanism) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"molregno\" = ?", o.Molregno),
	}

	queryMods = append(queryMods, mods...)

	return MoleculeDictionaries(queryMods...)
}

// DrugMechanismActionType pointed to by the foreign key.
func (o *DrugMechanism) DrugMechanismActionType(mods ...qm.QueryMod) actionTypeQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"action_type\" = ?", o.ActionType),
	}

	queryMods = append(queryMods, mods...)

	return ActionTypes(queryMods...)
}

// MecMechanismRefs retrieves all the mechanism_ref's MechanismRefs with an executor via mec_id column.
func (o *DrugMechanism) MecMechanismRefs(mods ...qm.QueryMod) mechanismRefQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"mechanism_refs\".\"mec_id\"=?", o.MecID),
	)

	return MechanismRefs(queryMods...)
}

// LoadVariant allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (drugMechanismL) LoadVariant(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDrugMechanism interface{}, mods queries.Applicator) error {
	var slice []*DrugMechanism
	var object *DrugMechanism

	// The loader is called either with a single object or a slice of objects.
	if singular {
		object = maybeDrugMechanism.(*DrugMechanism)
	} else {
		slice = *maybeDrugMechanism.(*[]*DrugMechanism)
	}

	// Collect distinct, non-NULL foreign key values to query with.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &drugMechanismR{}
		}
		if !queries.IsNil(object.VariantID) {
			args = append(args, object.VariantID)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &drugMechanismR{}
			}

			// Skip values already queued (deduplication).
			for _, a := range args {
				if queries.Equal(a, obj.VariantID) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.VariantID) {
				args = append(args, obj.VariantID)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`variant_sequences`),
		qm.WhereIn(`variant_sequences.variant_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load VariantSequence")
	}

	var resultSlice []*VariantSequence
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice VariantSequence")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for variant_sequences")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for variant_sequences")
	}

	if len(drugMechanismAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Attach the loaded rows to each local object and back-link the reverse relation.
	if singular {
		foreign := resultSlice[0]
		object.R.Variant = foreign
		if foreign.R == nil {
			foreign.R = &variantSequenceR{}
		}
		foreign.R.VariantDrugMechanisms = append(foreign.R.VariantDrugMechanisms, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.VariantID, foreign.VariantID) {
				local.R.Variant = foreign
				if foreign.R == nil {
					foreign.R = &variantSequenceR{}
				}
				foreign.R.VariantDrugMechanisms = append(foreign.R.VariantDrugMechanisms, local)
				break
			}
		}
	}

	return nil
}

// LoadTidTargetDictionary allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (drugMechanismL) LoadTidTargetDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDrugMechanism interface{}, mods queries.Applicator) error {
	var slice []*DrugMechanism
	var object *DrugMechanism

	if singular {
		object = maybeDrugMechanism.(*DrugMechanism)
	} else {
		slice = *maybeDrugMechanism.(*[]*DrugMechanism)
	}

	// Collect distinct, non-NULL tid values to query with.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &drugMechanismR{}
		}
		if !queries.IsNil(object.Tid) {
			args = append(args, object.Tid)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &drugMechanismR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.Tid) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.Tid) {
				args = append(args, obj.Tid)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`target_dictionary`),
		qm.WhereIn(`target_dictionary.tid in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load TargetDictionary")
	}

	var resultSlice []*TargetDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice TargetDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for target_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for target_dictionary")
	}

	if len(drugMechanismAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.TidTargetDictionary = foreign
		if foreign.R == nil {
			foreign.R = &targetDictionaryR{}
		}
		foreign.R.TidDrugMechanisms = append(foreign.R.TidDrugMechanisms, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.Tid, foreign.Tid) {
				local.R.TidTargetDictionary = foreign
				if foreign.R == nil {
					foreign.R = &targetDictionaryR{}
				}
				foreign.R.TidDrugMechanisms = append(foreign.R.TidDrugMechanisms, local)
				break
			}
		}
	}

	return nil
}

// LoadSite allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
+func (drugMechanismL) LoadSite(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDrugMechanism interface{}, mods queries.Applicator) error { + var slice []*DrugMechanism + var object *DrugMechanism + + if singular { + object = maybeDrugMechanism.(*DrugMechanism) + } else { + slice = *maybeDrugMechanism.(*[]*DrugMechanism) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &drugMechanismR{} + } + if !queries.IsNil(object.SiteID) { + args = append(args, object.SiteID) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &drugMechanismR{} + } + + for _, a := range args { + if queries.Equal(a, obj.SiteID) { + continue Outer + } + } + + if !queries.IsNil(obj.SiteID) { + args = append(args, obj.SiteID) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`binding_sites`), + qm.WhereIn(`binding_sites.site_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load BindingSite") + } + + var resultSlice []*BindingSite + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice BindingSite") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for binding_sites") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for binding_sites") + } + + if len(drugMechanismAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Site = foreign + if foreign.R == nil { + foreign.R = &bindingSiteR{} + } + foreign.R.SiteDrugMechanisms = 
append(foreign.R.SiteDrugMechanisms, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.SiteID, foreign.SiteID) { + local.R.Site = foreign + if foreign.R == nil { + foreign.R = &bindingSiteR{} + } + foreign.R.SiteDrugMechanisms = append(foreign.R.SiteDrugMechanisms, local) + break + } + } + } + + return nil +} + +// LoadRecord allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (drugMechanismL) LoadRecord(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDrugMechanism interface{}, mods queries.Applicator) error { + var slice []*DrugMechanism + var object *DrugMechanism + + if singular { + object = maybeDrugMechanism.(*DrugMechanism) + } else { + slice = *maybeDrugMechanism.(*[]*DrugMechanism) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &drugMechanismR{} + } + args = append(args, object.RecordID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &drugMechanismR{} + } + + for _, a := range args { + if a == obj.RecordID { + continue Outer + } + } + + args = append(args, obj.RecordID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`compound_records`), + qm.WhereIn(`compound_records.record_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load CompoundRecord") + } + + var resultSlice []*CompoundRecord + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice CompoundRecord") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for compound_records") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of 
eager loaded relations for compound_records") + } + + if len(drugMechanismAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Record = foreign + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.RecordDrugMechanisms = append(foreign.R.RecordDrugMechanisms, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.RecordID == foreign.RecordID { + local.R.Record = foreign + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.RecordDrugMechanisms = append(foreign.R.RecordDrugMechanisms, local) + break + } + } + } + + return nil +} + +// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (drugMechanismL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDrugMechanism interface{}, mods queries.Applicator) error { + var slice []*DrugMechanism + var object *DrugMechanism + + if singular { + object = maybeDrugMechanism.(*DrugMechanism) + } else { + slice = *maybeDrugMechanism.(*[]*DrugMechanism) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &drugMechanismR{} + } + if !queries.IsNil(object.Molregno) { + args = append(args, object.Molregno) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &drugMechanismR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Molregno) { + continue Outer + } + } + + if !queries.IsNil(obj.Molregno) { + args = append(args, obj.Molregno) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`molecule_dictionary`), + qm.WhereIn(`molecule_dictionary.molregno in ?`, args...), + ) + if mods != nil 
{ + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load MoleculeDictionary") + } + + var resultSlice []*MoleculeDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary") + } + + if len(drugMechanismAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.MolregnoMoleculeDictionary = foreign + if foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.MolregnoDrugMechanisms = append(foreign.R.MolregnoDrugMechanisms, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.Molregno, foreign.Molregno) { + local.R.MolregnoMoleculeDictionary = foreign + if foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.MolregnoDrugMechanisms = append(foreign.R.MolregnoDrugMechanisms, local) + break + } + } + } + + return nil +} + +// LoadDrugMechanismActionType allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (drugMechanismL) LoadDrugMechanismActionType(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDrugMechanism interface{}, mods queries.Applicator) error { + var slice []*DrugMechanism + var object *DrugMechanism + + if singular { + object = maybeDrugMechanism.(*DrugMechanism) + } else { + slice = *maybeDrugMechanism.(*[]*DrugMechanism) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &drugMechanismR{} + } + if !queries.IsNil(object.ActionType) { + args = append(args, object.ActionType) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &drugMechanismR{} + } + + for _, a := range args { + if queries.Equal(a, obj.ActionType) { + continue Outer + } + } + + if !queries.IsNil(obj.ActionType) { + args = append(args, obj.ActionType) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`action_type`), + qm.WhereIn(`action_type.action_type in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ActionType") + } + + var resultSlice []*ActionType + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice ActionType") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for action_type") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for action_type") + } + + if len(drugMechanismAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.DrugMechanismActionType = foreign + if foreign.R == nil { + foreign.R = &actionTypeR{} + } + 
foreign.R.DrugMechanisms = append(foreign.R.DrugMechanisms, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.ActionType, foreign.ActionType) { + local.R.DrugMechanismActionType = foreign + if foreign.R == nil { + foreign.R = &actionTypeR{} + } + foreign.R.DrugMechanisms = append(foreign.R.DrugMechanisms, local) + break + } + } + } + + return nil +} + +// LoadMecMechanismRefs allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (drugMechanismL) LoadMecMechanismRefs(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDrugMechanism interface{}, mods queries.Applicator) error { + var slice []*DrugMechanism + var object *DrugMechanism + + if singular { + object = maybeDrugMechanism.(*DrugMechanism) + } else { + slice = *maybeDrugMechanism.(*[]*DrugMechanism) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &drugMechanismR{} + } + args = append(args, object.MecID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &drugMechanismR{} + } + + for _, a := range args { + if a == obj.MecID { + continue Outer + } + } + + args = append(args, obj.MecID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`mechanism_refs`), + qm.WhereIn(`mechanism_refs.mec_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load mechanism_refs") + } + + var resultSlice []*MechanismRef + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice mechanism_refs") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on mechanism_refs") + } + if err = results.Err(); err != nil { + return 
errors.Wrap(err, "error occurred during iteration of eager loaded relations for mechanism_refs") + } + + if len(mechanismRefAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.MecMechanismRefs = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &mechanismRefR{} + } + foreign.R.Mec = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.MecID == foreign.MecID { + local.R.MecMechanismRefs = append(local.R.MecMechanismRefs, foreign) + if foreign.R == nil { + foreign.R = &mechanismRefR{} + } + foreign.R.Mec = local + break + } + } + } + + return nil +} + +// SetVariant of the drugMechanism to the related item. +// Sets o.R.Variant to related. +// Adds o to related.R.VariantDrugMechanisms. +func (o *DrugMechanism) SetVariant(ctx context.Context, exec boil.ContextExecutor, insert bool, related *VariantSequence) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"drug_mechanism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"variant_id"}), + strmangle.WhereClause("\"", "\"", 0, drugMechanismPrimaryKeyColumns), + ) + values := []interface{}{related.VariantID, o.MecID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.VariantID, related.VariantID) + if o.R == nil { + o.R = &drugMechanismR{ + Variant: related, + } + } else { + o.R.Variant = related + } + + if related.R == nil { + related.R = &variantSequenceR{ + VariantDrugMechanisms: 
DrugMechanismSlice{o}, + } + } else { + related.R.VariantDrugMechanisms = append(related.R.VariantDrugMechanisms, o) + } + + return nil +} + +// RemoveVariant relationship. +// Sets o.R.Variant to nil. +// Removes o from all passed in related items' relationships struct. +func (o *DrugMechanism) RemoveVariant(ctx context.Context, exec boil.ContextExecutor, related *VariantSequence) error { + var err error + + queries.SetScanner(&o.VariantID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("variant_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.Variant = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.VariantDrugMechanisms { + if queries.Equal(o.VariantID, ri.VariantID) { + continue + } + + ln := len(related.R.VariantDrugMechanisms) + if ln > 1 && i < ln-1 { + related.R.VariantDrugMechanisms[i] = related.R.VariantDrugMechanisms[ln-1] + } + related.R.VariantDrugMechanisms = related.R.VariantDrugMechanisms[:ln-1] + break + } + return nil +} + +// SetTidTargetDictionary of the drugMechanism to the related item. +// Sets o.R.TidTargetDictionary to related. +// Adds o to related.R.TidDrugMechanisms. 
func (o *DrugMechanism) SetTidTargetDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *TargetDictionary) error {
	var err error
	// Optionally persist the related row first so its key is usable as an FK.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Point this row's tid column at the related target_dictionary row.
	updateQuery := fmt.Sprintf(
		"UPDATE \"drug_mechanism\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"tid"}),
		strmangle.WhereClause("\"", "\"", 0, drugMechanismPrimaryKeyColumns),
	)
	values := []interface{}{related.Tid, o.MecID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the change in memory: the FK value plus both sides' caches.
	queries.Assign(&o.Tid, related.Tid)
	if o.R == nil {
		o.R = &drugMechanismR{
			TidTargetDictionary: related,
		}
	} else {
		o.R.TidTargetDictionary = related
	}

	if related.R == nil {
		related.R = &targetDictionaryR{
			TidDrugMechanisms: DrugMechanismSlice{o},
		}
	} else {
		related.R.TidDrugMechanisms = append(related.R.TidDrugMechanisms, o)
	}

	return nil
}

// RemoveTidTargetDictionary relationship.
// Sets o.R.TidTargetDictionary to nil.
// Removes o from all passed in related items' relationships struct.
func (o *DrugMechanism) RemoveTidTargetDictionary(ctx context.Context, exec boil.ContextExecutor, related *TargetDictionary) error {
	var err error

	// Null out the FK column and persist only that column.
	queries.SetScanner(&o.Tid, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("tid")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.TidTargetDictionary = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Drop one entry from the related side's cache (swap-with-last removal).
	for i, ri := range related.R.TidDrugMechanisms {
		if queries.Equal(o.Tid, ri.Tid) {
			continue
		}

		ln := len(related.R.TidDrugMechanisms)
		if ln > 1 && i < ln-1 {
			related.R.TidDrugMechanisms[i] = related.R.TidDrugMechanisms[ln-1]
		}
		related.R.TidDrugMechanisms = related.R.TidDrugMechanisms[:ln-1]
		break
	}
	return nil
}

// SetSite of the drugMechanism to the related item.
// Sets o.R.Site to related.
// Adds o to related.R.SiteDrugMechanisms.
func (o *DrugMechanism) SetSite(ctx context.Context, exec boil.ContextExecutor, insert bool, related *BindingSite) error {
	var err error
	// Optionally persist the related row first so its key is usable as an FK.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Point this row's site_id column at the related binding_sites row.
	updateQuery := fmt.Sprintf(
		"UPDATE \"drug_mechanism\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"site_id"}),
		strmangle.WhereClause("\"", "\"", 0, drugMechanismPrimaryKeyColumns),
	)
	values := []interface{}{related.SiteID, o.MecID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the change in memory: the FK value plus both sides' caches.
	queries.Assign(&o.SiteID, related.SiteID)
	if o.R == nil {
		o.R = &drugMechanismR{
			Site: related,
		}
	} else {
		o.R.Site = related
	}

	if related.R == nil {
		related.R = &bindingSiteR{
			SiteDrugMechanisms: DrugMechanismSlice{o},
		}
	} else {
		related.R.SiteDrugMechanisms = append(related.R.SiteDrugMechanisms, o)
	}

	return nil
}

// RemoveSite relationship.
// Sets o.R.Site to nil.
// Removes o from all passed in related items' relationships struct.
func (o *DrugMechanism) RemoveSite(ctx context.Context, exec boil.ContextExecutor, related *BindingSite) error {
	var err error

	// Null out the FK column and persist only that column.
	queries.SetScanner(&o.SiteID, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("site_id")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.Site = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Drop one entry from the related side's cache (swap-with-last removal).
	for i, ri := range related.R.SiteDrugMechanisms {
		if queries.Equal(o.SiteID, ri.SiteID) {
			continue
		}

		ln := len(related.R.SiteDrugMechanisms)
		if ln > 1 && i < ln-1 {
			related.R.SiteDrugMechanisms[i] = related.R.SiteDrugMechanisms[ln-1]
		}
		related.R.SiteDrugMechanisms = related.R.SiteDrugMechanisms[:ln-1]
		break
	}
	return nil
}

// SetRecord of the drugMechanism to the related item.
// Sets o.R.Record to related.
// Adds o to related.R.RecordDrugMechanisms.
func (o *DrugMechanism) SetRecord(ctx context.Context, exec boil.ContextExecutor, insert bool, related *CompoundRecord) error {
	var err error
	// Optionally persist the related row first so its key is usable as an FK.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Point this row's record_id column at the related compound_records row.
	updateQuery := fmt.Sprintf(
		"UPDATE \"drug_mechanism\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"record_id"}),
		strmangle.WhereClause("\"", "\"", 0, drugMechanismPrimaryKeyColumns),
	)
	values := []interface{}{related.RecordID, o.MecID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// record_id is a plain (non-nullable) column, so a direct assignment is
	// used here rather than queries.Assign.
	o.RecordID = related.RecordID
	if o.R == nil {
		o.R = &drugMechanismR{
			Record: related,
		}
	} else {
		o.R.Record = related
	}

	if related.R == nil {
		related.R = &compoundRecordR{
			RecordDrugMechanisms: DrugMechanismSlice{o},
		}
	} else {
		related.R.RecordDrugMechanisms = append(related.R.RecordDrugMechanisms, o)
	}

	return nil
}

// SetMolregnoMoleculeDictionary of the drugMechanism to the related item.
// Sets o.R.MolregnoMoleculeDictionary to related.
// Adds o to related.R.MolregnoDrugMechanisms.
func (o *DrugMechanism) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error {
	var err error
	// Optionally persist the related row first so its key is usable as an FK.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Point this row's molregno column at the related molecule_dictionary row.
	updateQuery := fmt.Sprintf(
		"UPDATE \"drug_mechanism\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}),
		strmangle.WhereClause("\"", "\"", 0, drugMechanismPrimaryKeyColumns),
	)
	values := []interface{}{related.Molregno, o.MecID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the change in memory: the FK value plus both sides' caches.
	queries.Assign(&o.Molregno, related.Molregno)
	if o.R == nil {
		o.R = &drugMechanismR{
			MolregnoMoleculeDictionary: related,
		}
	} else {
		o.R.MolregnoMoleculeDictionary = related
	}

	if related.R == nil {
		related.R = &moleculeDictionaryR{
			MolregnoDrugMechanisms: DrugMechanismSlice{o},
		}
	} else {
		related.R.MolregnoDrugMechanisms = append(related.R.MolregnoDrugMechanisms, o)
	}

	return nil
}

// RemoveMolregnoMoleculeDictionary relationship.
// Sets o.R.MolregnoMoleculeDictionary to nil.
// Removes o from all passed in related items' relationships struct.
func (o *DrugMechanism) RemoveMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, related *MoleculeDictionary) error {
	var err error

	// Null out the FK column and persist only that column.
	queries.SetScanner(&o.Molregno, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("molregno")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.MolregnoMoleculeDictionary = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Drop one entry from the related side's cache (swap-with-last removal).
	for i, ri := range related.R.MolregnoDrugMechanisms {
		if queries.Equal(o.Molregno, ri.Molregno) {
			continue
		}

		ln := len(related.R.MolregnoDrugMechanisms)
		if ln > 1 && i < ln-1 {
			related.R.MolregnoDrugMechanisms[i] = related.R.MolregnoDrugMechanisms[ln-1]
		}
		related.R.MolregnoDrugMechanisms = related.R.MolregnoDrugMechanisms[:ln-1]
		break
	}
	return nil
}

// SetDrugMechanismActionType of the drugMechanism to the related item.
// Sets o.R.DrugMechanismActionType to related.
// Adds o to related.R.DrugMechanisms.
func (o *DrugMechanism) SetDrugMechanismActionType(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ActionType) error {
	var err error
	// Optionally persist the related row first so its key is usable as an FK.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Point this row's action_type column at the related action_type row.
	updateQuery := fmt.Sprintf(
		"UPDATE \"drug_mechanism\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"action_type"}),
		strmangle.WhereClause("\"", "\"", 0, drugMechanismPrimaryKeyColumns),
	)
	values := []interface{}{related.ActionType, o.MecID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the change in memory: the FK value plus both sides' caches.
	queries.Assign(&o.ActionType, related.ActionType)
	if o.R == nil {
		o.R = &drugMechanismR{
			DrugMechanismActionType: related,
		}
	} else {
		o.R.DrugMechanismActionType = related
	}

	if related.R == nil {
		related.R = &actionTypeR{
			DrugMechanisms: DrugMechanismSlice{o},
		}
	} else {
		related.R.DrugMechanisms = append(related.R.DrugMechanisms, o)
	}

	return nil
}

// RemoveDrugMechanismActionType relationship.
// Sets o.R.DrugMechanismActionType to nil.
// Removes o from all passed in related items' relationships struct.
func (o *DrugMechanism) RemoveDrugMechanismActionType(ctx context.Context, exec boil.ContextExecutor, related *ActionType) error {
	var err error

	// Null out the FK column and persist only that column.
	queries.SetScanner(&o.ActionType, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("action_type")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.DrugMechanismActionType = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Drop one entry from the related side's cache (swap-with-last removal).
	for i, ri := range related.R.DrugMechanisms {
		if queries.Equal(o.ActionType, ri.ActionType) {
			continue
		}

		ln := len(related.R.DrugMechanisms)
		if ln > 1 && i < ln-1 {
			related.R.DrugMechanisms[i] = related.R.DrugMechanisms[ln-1]
		}
		related.R.DrugMechanisms = related.R.DrugMechanisms[:ln-1]
		break
	}
	return nil
}

// AddMecMechanismRefs adds the given related objects to the existing relationships
// of the drug_mechanism, optionally inserting them as new records.
// Appends related to o.R.MecMechanismRefs.
// Sets related.R.Mec appropriately.
func (o *DrugMechanism) AddMecMechanismRefs(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MechanismRef) error {
	var err error
	for _, rel := range related {
		if insert {
			// New row: stamp the FK before inserting.
			rel.MecID = o.MecID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: repoint its mec_id FK at this drug_mechanism.
			updateQuery := fmt.Sprintf(
				"UPDATE \"mechanism_refs\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"mec_id"}),
				strmangle.WhereClause("\"", "\"", 0, mechanismRefPrimaryKeyColumns),
			)
			values := []interface{}{o.MecID, rel.MecrefID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.MecID = o.MecID
		}
	}

	// Cache the new children on this side of the relationship.
	if o.R == nil {
		o.R = &drugMechanismR{
			MecMechanismRefs: related,
		}
	} else {
		o.R.MecMechanismRefs = append(o.R.MecMechanismRefs, related...)
	}

	// And the back-reference on each child's side.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &mechanismRefR{
				Mec: o,
			}
		} else {
			rel.R.Mec = o
		}
	}
	return nil
}

// DrugMechanisms retrieves all the records using an executor.
func DrugMechanisms(mods ...qm.QueryMod) drugMechanismQuery {
	mods = append(mods, qm.From("\"drug_mechanism\""))
	q := NewQuery(mods...)
	// Default to selecting every column unless the caller already set one.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"drug_mechanism\".*"})
	}

	return drugMechanismQuery{q}
}

// FindDrugMechanism retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
+func FindDrugMechanism(ctx context.Context, exec boil.ContextExecutor, mecID int64, selectCols ...string) (*DrugMechanism, error) { + drugMechanismObj := &DrugMechanism{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"drug_mechanism\" where \"mec_id\"=?", sel, + ) + + q := queries.Raw(query, mecID) + + err := q.Bind(ctx, exec, drugMechanismObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from drug_mechanism") + } + + if err = drugMechanismObj.doAfterSelectHooks(ctx, exec); err != nil { + return drugMechanismObj, err + } + + return drugMechanismObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *DrugMechanism) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no drug_mechanism provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(drugMechanismColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + drugMechanismInsertCacheMut.RLock() + cache, cached := drugMechanismInsertCache[key] + drugMechanismInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + drugMechanismAllColumns, + drugMechanismColumnsWithDefault, + drugMechanismColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(drugMechanismType, drugMechanismMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(drugMechanismType, drugMechanismMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT 
INTO \"drug_mechanism\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"drug_mechanism\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into drug_mechanism") + } + + if !cached { + drugMechanismInsertCacheMut.Lock() + drugMechanismInsertCache[key] = cache + drugMechanismInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the DrugMechanism. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *DrugMechanism) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + drugMechanismUpdateCacheMut.RLock() + cache, cached := drugMechanismUpdateCache[key] + drugMechanismUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + drugMechanismAllColumns, + drugMechanismPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update drug_mechanism, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"drug_mechanism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, drugMechanismPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(drugMechanismType, drugMechanismMapping, append(wl, drugMechanismPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update drug_mechanism row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for drug_mechanism") + } + + if !cached { + drugMechanismUpdateCacheMut.Lock() + drugMechanismUpdateCache[key] = cache + drugMechanismUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q drugMechanismQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for drug_mechanism") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for drug_mechanism") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o DrugMechanismSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), drugMechanismPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"drug_mechanism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, drugMechanismPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in drugMechanism slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all drugMechanism") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *DrugMechanism) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no drug_mechanism provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(drugMechanismColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + drugMechanismUpsertCacheMut.RLock() + cache, cached := drugMechanismUpsertCache[key] + drugMechanismUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + drugMechanismAllColumns, + drugMechanismColumnsWithDefault, + drugMechanismColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + drugMechanismAllColumns, + 
drugMechanismPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert drug_mechanism, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(drugMechanismPrimaryKeyColumns)) + copy(conflict, drugMechanismPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"drug_mechanism\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(drugMechanismType, drugMechanismMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(drugMechanismType, drugMechanismMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert drug_mechanism") + } + + if !cached { + drugMechanismUpsertCacheMut.Lock() + drugMechanismUpsertCache[key] = cache + drugMechanismUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single DrugMechanism record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *DrugMechanism) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no DrugMechanism provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), drugMechanismPrimaryKeyMapping) + sql := "DELETE FROM \"drug_mechanism\" WHERE \"mec_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from drug_mechanism") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for drug_mechanism") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q drugMechanismQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no drugMechanismQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from drug_mechanism") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for drug_mechanism") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o DrugMechanismSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(drugMechanismBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), drugMechanismPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"drug_mechanism\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, drugMechanismPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from drugMechanism slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for drug_mechanism") + } + + if len(drugMechanismAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *DrugMechanism) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindDrugMechanism(ctx, exec, o.MecID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *DrugMechanismSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := DrugMechanismSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), drugMechanismPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"drug_mechanism\".* FROM \"drug_mechanism\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, drugMechanismPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in DrugMechanismSlice") + } + + *o = slice + + return nil +} + +// DrugMechanismExists checks if the DrugMechanism row exists. +func DrugMechanismExists(ctx context.Context, exec boil.ContextExecutor, mecID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"drug_mechanism\" where \"mec_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, mecID) + } + row := exec.QueryRowContext(ctx, sql, mecID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if drug_mechanism exists") + } + + return exists, nil +} diff --git a/models/drug_warning.go b/models/drug_warning.go new file mode 100644 index 0000000..ab4e112 --- /dev/null +++ b/models/drug_warning.go @@ -0,0 +1,1391 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// DrugWarning is an object representing the database table. +type DrugWarning struct { + WarningID int64 `boil:"warning_id" json:"warning_id" toml:"warning_id" yaml:"warning_id"` + RecordID null.Int64 `boil:"record_id" json:"record_id,omitempty" toml:"record_id" yaml:"record_id,omitempty"` + Molregno null.Int64 `boil:"molregno" json:"molregno,omitempty" toml:"molregno" yaml:"molregno,omitempty"` + WarningType null.String `boil:"warning_type" json:"warning_type,omitempty" toml:"warning_type" yaml:"warning_type,omitempty"` + WarningClass null.String `boil:"warning_class" json:"warning_class,omitempty" toml:"warning_class" yaml:"warning_class,omitempty"` + WarningDescription null.String `boil:"warning_description" json:"warning_description,omitempty" toml:"warning_description" yaml:"warning_description,omitempty"` + WarningCountry null.String `boil:"warning_country" json:"warning_country,omitempty" toml:"warning_country" yaml:"warning_country,omitempty"` + WarningYear null.Int64 `boil:"warning_year" json:"warning_year,omitempty" toml:"warning_year" yaml:"warning_year,omitempty"` + + R *drugWarningR `boil:"-" json:"-" toml:"-" yaml:"-"` + L drugWarningL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var DrugWarningColumns = struct { + WarningID string + RecordID string + Molregno string + WarningType string + WarningClass string + WarningDescription string + WarningCountry string + WarningYear string +}{ + WarningID: "warning_id", + RecordID: "record_id", + Molregno: "molregno", + WarningType: "warning_type", + WarningClass: 
"warning_class", + WarningDescription: "warning_description", + WarningCountry: "warning_country", + WarningYear: "warning_year", +} + +var DrugWarningTableColumns = struct { + WarningID string + RecordID string + Molregno string + WarningType string + WarningClass string + WarningDescription string + WarningCountry string + WarningYear string +}{ + WarningID: "drug_warning.warning_id", + RecordID: "drug_warning.record_id", + Molregno: "drug_warning.molregno", + WarningType: "drug_warning.warning_type", + WarningClass: "drug_warning.warning_class", + WarningDescription: "drug_warning.warning_description", + WarningCountry: "drug_warning.warning_country", + WarningYear: "drug_warning.warning_year", +} + +// Generated where + +var DrugWarningWhere = struct { + WarningID whereHelperint64 + RecordID whereHelpernull_Int64 + Molregno whereHelpernull_Int64 + WarningType whereHelpernull_String + WarningClass whereHelpernull_String + WarningDescription whereHelpernull_String + WarningCountry whereHelpernull_String + WarningYear whereHelpernull_Int64 +}{ + WarningID: whereHelperint64{field: "\"drug_warning\".\"warning_id\""}, + RecordID: whereHelpernull_Int64{field: "\"drug_warning\".\"record_id\""}, + Molregno: whereHelpernull_Int64{field: "\"drug_warning\".\"molregno\""}, + WarningType: whereHelpernull_String{field: "\"drug_warning\".\"warning_type\""}, + WarningClass: whereHelpernull_String{field: "\"drug_warning\".\"warning_class\""}, + WarningDescription: whereHelpernull_String{field: "\"drug_warning\".\"warning_description\""}, + WarningCountry: whereHelpernull_String{field: "\"drug_warning\".\"warning_country\""}, + WarningYear: whereHelpernull_Int64{field: "\"drug_warning\".\"warning_year\""}, +} + +// DrugWarningRels is where relationship names are stored. +var DrugWarningRels = struct { + Record string + WarningWarningRefs string +}{ + Record: "Record", + WarningWarningRefs: "WarningWarningRefs", +} + +// drugWarningR is where relationships are stored. 
+type drugWarningR struct { + Record *CompoundRecord `boil:"Record" json:"Record" toml:"Record" yaml:"Record"` + WarningWarningRefs WarningRefSlice `boil:"WarningWarningRefs" json:"WarningWarningRefs" toml:"WarningWarningRefs" yaml:"WarningWarningRefs"` +} + +// NewStruct creates a new relationship struct +func (*drugWarningR) NewStruct() *drugWarningR { + return &drugWarningR{} +} + +func (r *drugWarningR) GetRecord() *CompoundRecord { + if r == nil { + return nil + } + return r.Record +} + +func (r *drugWarningR) GetWarningWarningRefs() WarningRefSlice { + if r == nil { + return nil + } + return r.WarningWarningRefs +} + +// drugWarningL is where Load methods for each relationship are stored. +type drugWarningL struct{} + +var ( + drugWarningAllColumns = []string{"warning_id", "record_id", "molregno", "warning_type", "warning_class", "warning_description", "warning_country", "warning_year"} + drugWarningColumnsWithoutDefault = []string{"warning_id"} + drugWarningColumnsWithDefault = []string{"record_id", "molregno", "warning_type", "warning_class", "warning_description", "warning_country", "warning_year"} + drugWarningPrimaryKeyColumns = []string{"warning_id"} + drugWarningGeneratedColumns = []string{} +) + +type ( + // DrugWarningSlice is an alias for a slice of pointers to DrugWarning. + // This should almost always be used instead of []DrugWarning. 
+ DrugWarningSlice []*DrugWarning + // DrugWarningHook is the signature for custom DrugWarning hook methods + DrugWarningHook func(context.Context, boil.ContextExecutor, *DrugWarning) error + + drugWarningQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + drugWarningType = reflect.TypeOf(&DrugWarning{}) + drugWarningMapping = queries.MakeStructMapping(drugWarningType) + drugWarningPrimaryKeyMapping, _ = queries.BindMapping(drugWarningType, drugWarningMapping, drugWarningPrimaryKeyColumns) + drugWarningInsertCacheMut sync.RWMutex + drugWarningInsertCache = make(map[string]insertCache) + drugWarningUpdateCacheMut sync.RWMutex + drugWarningUpdateCache = make(map[string]updateCache) + drugWarningUpsertCacheMut sync.RWMutex + drugWarningUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var drugWarningAfterSelectHooks []DrugWarningHook + +var drugWarningBeforeInsertHooks []DrugWarningHook +var drugWarningAfterInsertHooks []DrugWarningHook + +var drugWarningBeforeUpdateHooks []DrugWarningHook +var drugWarningAfterUpdateHooks []DrugWarningHook + +var drugWarningBeforeDeleteHooks []DrugWarningHook +var drugWarningAfterDeleteHooks []DrugWarningHook + +var drugWarningBeforeUpsertHooks []DrugWarningHook +var drugWarningAfterUpsertHooks []DrugWarningHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *DrugWarning) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugWarningAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. 
+func (o *DrugWarning) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugWarningBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *DrugWarning) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugWarningAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *DrugWarning) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugWarningBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *DrugWarning) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugWarningAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *DrugWarning) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugWarningBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. 
+func (o *DrugWarning) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugWarningAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *DrugWarning) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugWarningBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *DrugWarning) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range drugWarningAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddDrugWarningHook registers your hook function for all future operations. 
+func AddDrugWarningHook(hookPoint boil.HookPoint, drugWarningHook DrugWarningHook) { + switch hookPoint { + case boil.AfterSelectHook: + drugWarningAfterSelectHooks = append(drugWarningAfterSelectHooks, drugWarningHook) + case boil.BeforeInsertHook: + drugWarningBeforeInsertHooks = append(drugWarningBeforeInsertHooks, drugWarningHook) + case boil.AfterInsertHook: + drugWarningAfterInsertHooks = append(drugWarningAfterInsertHooks, drugWarningHook) + case boil.BeforeUpdateHook: + drugWarningBeforeUpdateHooks = append(drugWarningBeforeUpdateHooks, drugWarningHook) + case boil.AfterUpdateHook: + drugWarningAfterUpdateHooks = append(drugWarningAfterUpdateHooks, drugWarningHook) + case boil.BeforeDeleteHook: + drugWarningBeforeDeleteHooks = append(drugWarningBeforeDeleteHooks, drugWarningHook) + case boil.AfterDeleteHook: + drugWarningAfterDeleteHooks = append(drugWarningAfterDeleteHooks, drugWarningHook) + case boil.BeforeUpsertHook: + drugWarningBeforeUpsertHooks = append(drugWarningBeforeUpsertHooks, drugWarningHook) + case boil.AfterUpsertHook: + drugWarningAfterUpsertHooks = append(drugWarningAfterUpsertHooks, drugWarningHook) + } +} + +// One returns a single drugWarning record from the query. +func (q drugWarningQuery) One(ctx context.Context, exec boil.ContextExecutor) (*DrugWarning, error) { + o := &DrugWarning{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for drug_warning") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all DrugWarning records from the query. 
+func (q drugWarningQuery) All(ctx context.Context, exec boil.ContextExecutor) (DrugWarningSlice, error) { + var o []*DrugWarning + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to DrugWarning slice") + } + + if len(drugWarningAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all DrugWarning records in the query. +func (q drugWarningQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count drug_warning rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q drugWarningQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if drug_warning exists") + } + + return count > 0, nil +} + +// Record pointed to by the foreign key. +func (o *DrugWarning) Record(mods ...qm.QueryMod) compoundRecordQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"record_id\" = ?", o.RecordID), + } + + queryMods = append(queryMods, mods...) + + return CompoundRecords(queryMods...) +} + +// WarningWarningRefs retrieves all the warning_ref's WarningRefs with an executor via warning_id column. +func (o *DrugWarning) WarningWarningRefs(mods ...qm.QueryMod) warningRefQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) 
+ } + + queryMods = append(queryMods, + qm.Where("\"warning_refs\".\"warning_id\"=?", o.WarningID), + ) + + return WarningRefs(queryMods...) +} + +// LoadRecord allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (drugWarningL) LoadRecord(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDrugWarning interface{}, mods queries.Applicator) error { + var slice []*DrugWarning + var object *DrugWarning + + if singular { + object = maybeDrugWarning.(*DrugWarning) + } else { + slice = *maybeDrugWarning.(*[]*DrugWarning) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &drugWarningR{} + } + if !queries.IsNil(object.RecordID) { + args = append(args, object.RecordID) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &drugWarningR{} + } + + for _, a := range args { + if queries.Equal(a, obj.RecordID) { + continue Outer + } + } + + if !queries.IsNil(obj.RecordID) { + args = append(args, obj.RecordID) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`compound_records`), + qm.WhereIn(`compound_records.record_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load CompoundRecord") + } + + var resultSlice []*CompoundRecord + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice CompoundRecord") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for compound_records") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound_records") + } + + if len(drugWarningAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := 
obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Record = foreign + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.RecordDrugWarnings = append(foreign.R.RecordDrugWarnings, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.RecordID, foreign.RecordID) { + local.R.Record = foreign + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.RecordDrugWarnings = append(foreign.R.RecordDrugWarnings, local) + break + } + } + } + + return nil +} + +// LoadWarningWarningRefs allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (drugWarningL) LoadWarningWarningRefs(ctx context.Context, e boil.ContextExecutor, singular bool, maybeDrugWarning interface{}, mods queries.Applicator) error { + var slice []*DrugWarning + var object *DrugWarning + + if singular { + object = maybeDrugWarning.(*DrugWarning) + } else { + slice = *maybeDrugWarning.(*[]*DrugWarning) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &drugWarningR{} + } + args = append(args, object.WarningID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &drugWarningR{} + } + + for _, a := range args { + if queries.Equal(a, obj.WarningID) { + continue Outer + } + } + + args = append(args, obj.WarningID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`warning_refs`), + qm.WhereIn(`warning_refs.warning_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load warning_refs") + } + + var resultSlice []*WarningRef + if err = queries.Bind(results, &resultSlice); err != nil { + return 
errors.Wrap(err, "failed to bind eager loaded slice warning_refs") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on warning_refs") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for warning_refs") + } + + if len(warningRefAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.WarningWarningRefs = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &warningRefR{} + } + foreign.R.Warning = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.WarningID, foreign.WarningID) { + local.R.WarningWarningRefs = append(local.R.WarningWarningRefs, foreign) + if foreign.R == nil { + foreign.R = &warningRefR{} + } + foreign.R.Warning = local + break + } + } + } + + return nil +} + +// SetRecord of the drugWarning to the related item. +// Sets o.R.Record to related. +// Adds o to related.R.RecordDrugWarnings. 
+func (o *DrugWarning) SetRecord(ctx context.Context, exec boil.ContextExecutor, insert bool, related *CompoundRecord) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"drug_warning\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"record_id"}), + strmangle.WhereClause("\"", "\"", 0, drugWarningPrimaryKeyColumns), + ) + values := []interface{}{related.RecordID, o.WarningID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.RecordID, related.RecordID) + if o.R == nil { + o.R = &drugWarningR{ + Record: related, + } + } else { + o.R.Record = related + } + + if related.R == nil { + related.R = &compoundRecordR{ + RecordDrugWarnings: DrugWarningSlice{o}, + } + } else { + related.R.RecordDrugWarnings = append(related.R.RecordDrugWarnings, o) + } + + return nil +} + +// RemoveRecord relationship. +// Sets o.R.Record to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *DrugWarning) RemoveRecord(ctx context.Context, exec boil.ContextExecutor, related *CompoundRecord) error { + var err error + + queries.SetScanner(&o.RecordID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("record_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.Record = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.RecordDrugWarnings { + if queries.Equal(o.RecordID, ri.RecordID) { + continue + } + + ln := len(related.R.RecordDrugWarnings) + if ln > 1 && i < ln-1 { + related.R.RecordDrugWarnings[i] = related.R.RecordDrugWarnings[ln-1] + } + related.R.RecordDrugWarnings = related.R.RecordDrugWarnings[:ln-1] + break + } + return nil +} + +// AddWarningWarningRefs adds the given related objects to the existing relationships +// of the drug_warning, optionally inserting them as new records. +// Appends related to o.R.WarningWarningRefs. +// Sets related.R.Warning appropriately. 
+func (o *DrugWarning) AddWarningWarningRefs(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*WarningRef) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.WarningID, o.WarningID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"warning_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"warning_id"}), + strmangle.WhereClause("\"", "\"", 0, warningRefPrimaryKeyColumns), + ) + values := []interface{}{o.WarningID, rel.WarnrefID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.WarningID, o.WarningID) + } + } + + if o.R == nil { + o.R = &drugWarningR{ + WarningWarningRefs: related, + } + } else { + o.R.WarningWarningRefs = append(o.R.WarningWarningRefs, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &warningRefR{ + Warning: o, + } + } else { + rel.R.Warning = o + } + } + return nil +} + +// SetWarningWarningRefs removes all previously related items of the +// drug_warning replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.Warning's WarningWarningRefs accordingly. +// Replaces o.R.WarningWarningRefs with related. +// Sets related.R.Warning's WarningWarningRefs accordingly. +func (o *DrugWarning) SetWarningWarningRefs(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*WarningRef) error { + query := "update \"warning_refs\" set \"warning_id\" = null where \"warning_id\" = ?" 
+ values := []interface{}{o.WarningID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.WarningWarningRefs { + queries.SetScanner(&rel.WarningID, nil) + if rel.R == nil { + continue + } + + rel.R.Warning = nil + } + o.R.WarningWarningRefs = nil + } + + return o.AddWarningWarningRefs(ctx, exec, insert, related...) +} + +// RemoveWarningWarningRefs relationships from objects passed in. +// Removes related items from R.WarningWarningRefs (uses pointer comparison, removal does not keep order) +// Sets related.R.Warning. +func (o *DrugWarning) RemoveWarningWarningRefs(ctx context.Context, exec boil.ContextExecutor, related ...*WarningRef) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.WarningID, nil) + if rel.R != nil { + rel.R.Warning = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("warning_id")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.WarningWarningRefs { + if rel != ri { + continue + } + + ln := len(o.R.WarningWarningRefs) + if ln > 1 && i < ln-1 { + o.R.WarningWarningRefs[i] = o.R.WarningWarningRefs[ln-1] + } + o.R.WarningWarningRefs = o.R.WarningWarningRefs[:ln-1] + break + } + } + + return nil +} + +// DrugWarnings retrieves all the records using an executor. +func DrugWarnings(mods ...qm.QueryMod) drugWarningQuery { + mods = append(mods, qm.From("\"drug_warning\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"drug_warning\".*"}) + } + + return drugWarningQuery{q} +} + +// FindDrugWarning retrieves a single record by ID with an executor. 
+// If selectCols is empty Find will return all columns. +func FindDrugWarning(ctx context.Context, exec boil.ContextExecutor, warningID int64, selectCols ...string) (*DrugWarning, error) { + drugWarningObj := &DrugWarning{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"drug_warning\" where \"warning_id\"=?", sel, + ) + + q := queries.Raw(query, warningID) + + err := q.Bind(ctx, exec, drugWarningObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from drug_warning") + } + + if err = drugWarningObj.doAfterSelectHooks(ctx, exec); err != nil { + return drugWarningObj, err + } + + return drugWarningObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *DrugWarning) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no drug_warning provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(drugWarningColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + drugWarningInsertCacheMut.RLock() + cache, cached := drugWarningInsertCache[key] + drugWarningInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + drugWarningAllColumns, + drugWarningColumnsWithDefault, + drugWarningColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(drugWarningType, drugWarningMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(drugWarningType, drugWarningMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query 
= fmt.Sprintf("INSERT INTO \"drug_warning\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"drug_warning\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into drug_warning") + } + + if !cached { + drugWarningInsertCacheMut.Lock() + drugWarningInsertCache[key] = cache + drugWarningInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the DrugWarning. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *DrugWarning) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + drugWarningUpdateCacheMut.RLock() + cache, cached := drugWarningUpdateCache[key] + drugWarningUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + drugWarningAllColumns, + drugWarningPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update drug_warning, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"drug_warning\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, drugWarningPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(drugWarningType, drugWarningMapping, append(wl, drugWarningPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update drug_warning row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for drug_warning") + } + + if !cached { + drugWarningUpdateCacheMut.Lock() + drugWarningUpdateCache[key] = cache + drugWarningUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q drugWarningQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for drug_warning") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for drug_warning") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o DrugWarningSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), drugWarningPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"drug_warning\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, drugWarningPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in drugWarning slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all drugWarning") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *DrugWarning) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no drug_warning provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(drugWarningColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + drugWarningUpsertCacheMut.RLock() + cache, cached := drugWarningUpsertCache[key] + drugWarningUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + drugWarningAllColumns, + drugWarningColumnsWithDefault, + drugWarningColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + drugWarningAllColumns, + drugWarningPrimaryKeyColumns, + ) + + if 
updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert drug_warning, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(drugWarningPrimaryKeyColumns)) + copy(conflict, drugWarningPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"drug_warning\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(drugWarningType, drugWarningMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(drugWarningType, drugWarningMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert drug_warning") + } + + if !cached { + drugWarningUpsertCacheMut.Lock() + drugWarningUpsertCache[key] = cache + drugWarningUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single DrugWarning record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *DrugWarning) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no DrugWarning provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), drugWarningPrimaryKeyMapping) + sql := "DELETE FROM \"drug_warning\" WHERE \"warning_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from drug_warning") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for drug_warning") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q drugWarningQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no drugWarningQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from drug_warning") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for drug_warning") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o DrugWarningSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(drugWarningBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), drugWarningPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"drug_warning\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, drugWarningPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from drugWarning slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for drug_warning") + } + + if len(drugWarningAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *DrugWarning) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindDrugWarning(ctx, exec, o.WarningID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *DrugWarningSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := DrugWarningSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), drugWarningPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"drug_warning\".* FROM \"drug_warning\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, drugWarningPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in DrugWarningSlice") + } + + *o = slice + + return nil +} + +// DrugWarningExists checks if the DrugWarning row exists. +func DrugWarningExists(ctx context.Context, exec boil.ContextExecutor, warningID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"drug_warning\" where \"warning_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, warningID) + } + row := exec.QueryRowContext(ctx, sql, warningID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if drug_warning exists") + } + + return exists, nil +} diff --git a/models/formulations.go b/models/formulations.go new file mode 100644 index 0000000..cea52c8 --- /dev/null +++ b/models/formulations.go @@ -0,0 +1,1472 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Formulation is an object representing the database table. +type Formulation struct { + ProductID string `boil:"product_id" json:"product_id" toml:"product_id" yaml:"product_id"` + Ingredient null.String `boil:"ingredient" json:"ingredient,omitempty" toml:"ingredient" yaml:"ingredient,omitempty"` + Strength null.String `boil:"strength" json:"strength,omitempty" toml:"strength" yaml:"strength,omitempty"` + RecordID int64 `boil:"record_id" json:"record_id" toml:"record_id" yaml:"record_id"` + Molregno null.Int64 `boil:"molregno" json:"molregno,omitempty" toml:"molregno" yaml:"molregno,omitempty"` + FormulationID int64 `boil:"formulation_id" json:"formulation_id" toml:"formulation_id" yaml:"formulation_id"` + + R *formulationR `boil:"-" json:"-" toml:"-" yaml:"-"` + L formulationL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var FormulationColumns = struct { + ProductID string + Ingredient string + Strength string + RecordID string + Molregno string + FormulationID string +}{ + ProductID: "product_id", + Ingredient: "ingredient", + Strength: "strength", + RecordID: "record_id", + Molregno: "molregno", + FormulationID: "formulation_id", +} + +var FormulationTableColumns = struct { + ProductID string + Ingredient string + Strength string + RecordID string + Molregno string + FormulationID string +}{ + ProductID: "formulations.product_id", + Ingredient: "formulations.ingredient", + Strength: "formulations.strength", + RecordID: "formulations.record_id", + Molregno: "formulations.molregno", + FormulationID: "formulations.formulation_id", 
+} + +// Generated where + +var FormulationWhere = struct { + ProductID whereHelperstring + Ingredient whereHelpernull_String + Strength whereHelpernull_String + RecordID whereHelperint64 + Molregno whereHelpernull_Int64 + FormulationID whereHelperint64 +}{ + ProductID: whereHelperstring{field: "\"formulations\".\"product_id\""}, + Ingredient: whereHelpernull_String{field: "\"formulations\".\"ingredient\""}, + Strength: whereHelpernull_String{field: "\"formulations\".\"strength\""}, + RecordID: whereHelperint64{field: "\"formulations\".\"record_id\""}, + Molregno: whereHelpernull_Int64{field: "\"formulations\".\"molregno\""}, + FormulationID: whereHelperint64{field: "\"formulations\".\"formulation_id\""}, +} + +// FormulationRels is where relationship names are stored. +var FormulationRels = struct { + Record string + Product string + MolregnoMoleculeDictionary string +}{ + Record: "Record", + Product: "Product", + MolregnoMoleculeDictionary: "MolregnoMoleculeDictionary", +} + +// formulationR is where relationships are stored. +type formulationR struct { + Record *CompoundRecord `boil:"Record" json:"Record" toml:"Record" yaml:"Record"` + Product *Product `boil:"Product" json:"Product" toml:"Product" yaml:"Product"` + MolregnoMoleculeDictionary *MoleculeDictionary `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"` +} + +// NewStruct creates a new relationship struct +func (*formulationR) NewStruct() *formulationR { + return &formulationR{} +} + +func (r *formulationR) GetRecord() *CompoundRecord { + if r == nil { + return nil + } + return r.Record +} + +func (r *formulationR) GetProduct() *Product { + if r == nil { + return nil + } + return r.Product +} + +func (r *formulationR) GetMolregnoMoleculeDictionary() *MoleculeDictionary { + if r == nil { + return nil + } + return r.MolregnoMoleculeDictionary +} + +// formulationL is where Load methods for each relationship are stored. 
+type formulationL struct{} + +var ( + formulationAllColumns = []string{"product_id", "ingredient", "strength", "record_id", "molregno", "formulation_id"} + formulationColumnsWithoutDefault = []string{"product_id", "record_id", "formulation_id"} + formulationColumnsWithDefault = []string{"ingredient", "strength", "molregno"} + formulationPrimaryKeyColumns = []string{"formulation_id"} + formulationGeneratedColumns = []string{} +) + +type ( + // FormulationSlice is an alias for a slice of pointers to Formulation. + // This should almost always be used instead of []Formulation. + FormulationSlice []*Formulation + // FormulationHook is the signature for custom Formulation hook methods + FormulationHook func(context.Context, boil.ContextExecutor, *Formulation) error + + formulationQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + formulationType = reflect.TypeOf(&Formulation{}) + formulationMapping = queries.MakeStructMapping(formulationType) + formulationPrimaryKeyMapping, _ = queries.BindMapping(formulationType, formulationMapping, formulationPrimaryKeyColumns) + formulationInsertCacheMut sync.RWMutex + formulationInsertCache = make(map[string]insertCache) + formulationUpdateCacheMut sync.RWMutex + formulationUpdateCache = make(map[string]updateCache) + formulationUpsertCacheMut sync.RWMutex + formulationUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var formulationAfterSelectHooks []FormulationHook + +var formulationBeforeInsertHooks []FormulationHook +var formulationAfterInsertHooks []FormulationHook + +var formulationBeforeUpdateHooks []FormulationHook +var formulationAfterUpdateHooks []FormulationHook + +var formulationBeforeDeleteHooks []FormulationHook +var formulationAfterDeleteHooks []FormulationHook + +var formulationBeforeUpsertHooks []FormulationHook +var formulationAfterUpsertHooks []FormulationHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *Formulation) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range formulationAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *Formulation) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range formulationBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Formulation) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range formulationAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *Formulation) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range formulationBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Formulation) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range formulationAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Formulation) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range formulationBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *Formulation) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range formulationAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Formulation) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range formulationBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *Formulation) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range formulationAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddFormulationHook registers your hook function for all future operations. +func AddFormulationHook(hookPoint boil.HookPoint, formulationHook FormulationHook) { + switch hookPoint { + case boil.AfterSelectHook: + formulationAfterSelectHooks = append(formulationAfterSelectHooks, formulationHook) + case boil.BeforeInsertHook: + formulationBeforeInsertHooks = append(formulationBeforeInsertHooks, formulationHook) + case boil.AfterInsertHook: + formulationAfterInsertHooks = append(formulationAfterInsertHooks, formulationHook) + case boil.BeforeUpdateHook: + formulationBeforeUpdateHooks = append(formulationBeforeUpdateHooks, formulationHook) + case boil.AfterUpdateHook: + formulationAfterUpdateHooks = append(formulationAfterUpdateHooks, formulationHook) + case boil.BeforeDeleteHook: + formulationBeforeDeleteHooks = append(formulationBeforeDeleteHooks, formulationHook) + case boil.AfterDeleteHook: + formulationAfterDeleteHooks = append(formulationAfterDeleteHooks, formulationHook) + case boil.BeforeUpsertHook: + formulationBeforeUpsertHooks = append(formulationBeforeUpsertHooks, formulationHook) + case boil.AfterUpsertHook: + formulationAfterUpsertHooks = append(formulationAfterUpsertHooks, formulationHook) + } +} + +// One returns a single formulation record from the query. 
+func (q formulationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Formulation, error) { + o := &Formulation{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for formulations") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Formulation records from the query. +func (q formulationQuery) All(ctx context.Context, exec boil.ContextExecutor) (FormulationSlice, error) { + var o []*Formulation + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to Formulation slice") + } + + if len(formulationAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Formulation records in the query. +func (q formulationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count formulations rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q formulationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if formulations exists") + } + + return count > 0, nil +} + +// Record pointed to by the foreign key. 
func (o *Formulation) Record(mods ...qm.QueryMod) compoundRecordQuery {
	// Seed the query with the FK predicate, then append caller mods.
	queryMods := []qm.QueryMod{
		qm.Where("\"record_id\" = ?", o.RecordID),
	}

	queryMods = append(queryMods, mods...)

	return CompoundRecords(queryMods...)
}

// Product pointed to by the foreign key.
func (o *Formulation) Product(mods ...qm.QueryMod) productQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"product_id\" = ?", o.ProductID),
	}

	queryMods = append(queryMods, mods...)

	return Products(queryMods...)
}

// MolregnoMoleculeDictionary pointed to by the foreign key.
func (o *Formulation) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"molregno\" = ?", o.Molregno),
	}

	queryMods = append(queryMods, mods...)

	return MoleculeDictionaries(queryMods...)
}

// LoadRecord allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (formulationL) LoadRecord(ctx context.Context, e boil.ContextExecutor, singular bool, maybeFormulation interface{}, mods queries.Applicator) error {
	var slice []*Formulation
	var object *Formulation

	// maybeFormulation is either a single *Formulation or a *[]*Formulation.
	if singular {
		object = maybeFormulation.(*Formulation)
	} else {
		slice = *maybeFormulation.(*[]*Formulation)
	}

	// Collect the distinct RecordID values to look up, initializing each
	// object's relationship struct along the way.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &formulationR{}
		}
		args = append(args, object.RecordID)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &formulationR{}
			}

			// Linear dedupe; fine for typical eager-load batch sizes.
			for _, a := range args {
				if a == obj.RecordID {
					continue Outer
				}
			}

			args = append(args, obj.RecordID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`compound_records`),
		qm.WhereIn(`compound_records.record_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load CompoundRecord")
	}

	var resultSlice []*CompoundRecord
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice CompoundRecord")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for compound_records")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound_records")
	}

	if len(formulationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Wire up both sides of the relationship (parent cache and back-reference).
	if singular {
		foreign := resultSlice[0]
		object.R.Record = foreign
		if foreign.R == nil {
			foreign.R = &compoundRecordR{}
		}
		foreign.R.RecordFormulations = append(foreign.R.RecordFormulations, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.RecordID == foreign.RecordID {
				local.R.Record = foreign
				if foreign.R == nil {
					foreign.R = &compoundRecordR{}
				}
				foreign.R.RecordFormulations = append(foreign.R.RecordFormulations, local)
				break
			}
		}
	}

	return nil
}

// LoadProduct allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (formulationL) LoadProduct(ctx context.Context, e boil.ContextExecutor, singular bool, maybeFormulation interface{}, mods queries.Applicator) error {
	var slice []*Formulation
	var object *Formulation

	// maybeFormulation is either a single *Formulation or a *[]*Formulation.
	if singular {
		object = maybeFormulation.(*Formulation)
	} else {
		slice = *maybeFormulation.(*[]*Formulation)
	}

	// Collect distinct ProductID values, initializing relationship structs.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &formulationR{}
		}
		args = append(args, object.ProductID)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &formulationR{}
			}

			for _, a := range args {
				if a == obj.ProductID {
					continue Outer
				}
			}

			args = append(args, obj.ProductID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`products`),
		qm.WhereIn(`products.product_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load Product")
	}

	var resultSlice []*Product
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice Product")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for products")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for products")
	}

	if len(formulationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Wire up both sides of the relationship.
	if singular {
		foreign := resultSlice[0]
		object.R.Product = foreign
		if foreign.R == nil {
			foreign.R = &productR{}
		}
		foreign.R.Formulations = append(foreign.R.Formulations, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.ProductID == foreign.ProductID {
				local.R.Product = foreign
				if foreign.R == nil {
					foreign.R = &productR{}
				}
				foreign.R.Formulations = append(foreign.R.Formulations, local)
				break
			}
		}
	}

	return nil
}

// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (formulationL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeFormulation interface{}, mods queries.Applicator) error {
	var slice []*Formulation
	var object *Formulation

	if singular {
		object = maybeFormulation.(*Formulation)
	} else {
		slice = *maybeFormulation.(*[]*Formulation)
	}

	// Molregno is a nullable FK: NULL values are skipped (queries.IsNil) and
	// comparison goes through queries.Equal rather than ==.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &formulationR{}
		}
		if !queries.IsNil(object.Molregno) {
			args = append(args, object.Molregno)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &formulationR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.Molregno) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.Molregno) {
				args = append(args, obj.Molregno)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_dictionary`),
		qm.WhereIn(`molecule_dictionary.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load MoleculeDictionary")
	}

	var resultSlice []*MoleculeDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary")
	}

	if len(formulationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Wire up both sides of the relationship.
	if singular {
		foreign := resultSlice[0]
		object.R.MolregnoMoleculeDictionary = foreign
		if foreign.R == nil {
			foreign.R = &moleculeDictionaryR{}
		}
		foreign.R.MolregnoFormulations = append(foreign.R.MolregnoFormulations, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.Molregno, foreign.Molregno) {
				local.R.MolregnoMoleculeDictionary = foreign
				if foreign.R == nil {
					foreign.R = &moleculeDictionaryR{}
				}
				foreign.R.MolregnoFormulations = append(foreign.R.MolregnoFormulations, local)
				break
			}
		}
	}

	return nil
}

// SetRecord of the formulation to the related item.
// Sets o.R.Record to related.
// Adds o to related.R.RecordFormulations.
func (o *Formulation) SetRecord(ctx context.Context, exec boil.ContextExecutor, insert bool, related *CompoundRecord) error {
	var err error
	// Optionally insert the related row first so its PK exists.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"formulations\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"record_id"}),
		strmangle.WhereClause("\"", "\"", 0, formulationPrimaryKeyColumns),
	)
	values := []interface{}{related.RecordID, o.FormulationID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the DB change in memory: FK field plus both relationship caches.
	o.RecordID = related.RecordID
	if o.R == nil {
		o.R = &formulationR{
			Record: related,
		}
	} else {
		o.R.Record = related
	}

	if related.R == nil {
		related.R = &compoundRecordR{
			RecordFormulations: FormulationSlice{o},
		}
	} else {
		related.R.RecordFormulations = append(related.R.RecordFormulations, o)
	}

	return nil
}

// SetProduct of the formulation to the related item.
// Sets o.R.Product to related.
// Adds o to related.R.Formulations.
func (o *Formulation) SetProduct(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Product) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"formulations\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"product_id"}),
		strmangle.WhereClause("\"", "\"", 0, formulationPrimaryKeyColumns),
	)
	values := []interface{}{related.ProductID, o.FormulationID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the DB change in memory: FK field plus both relationship caches.
	o.ProductID = related.ProductID
	if o.R == nil {
		o.R = &formulationR{
			Product: related,
		}
	} else {
		o.R.Product = related
	}

	if related.R == nil {
		related.R = &productR{
			Formulations: FormulationSlice{o},
		}
	} else {
		related.R.Formulations = append(related.R.Formulations, o)
	}

	return nil
}

// SetMolregnoMoleculeDictionary of the formulation to the related item.
// Sets o.R.MolregnoMoleculeDictionary to related.
// Adds o to related.R.MolregnoFormulations.
func (o *Formulation) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error {
	var err error
	// Optionally insert the related row first so its PK exists.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"formulations\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}),
		strmangle.WhereClause("\"", "\"", 0, formulationPrimaryKeyColumns),
	)
	values := []interface{}{related.Molregno, o.FormulationID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Molregno is nullable, so assignment goes through queries.Assign
	// rather than a plain field write.
	queries.Assign(&o.Molregno, related.Molregno)
	if o.R == nil {
		o.R = &formulationR{
			MolregnoMoleculeDictionary: related,
		}
	} else {
		o.R.MolregnoMoleculeDictionary = related
	}

	if related.R == nil {
		related.R = &moleculeDictionaryR{
			MolregnoFormulations: FormulationSlice{o},
		}
	} else {
		related.R.MolregnoFormulations = append(related.R.MolregnoFormulations, o)
	}

	return nil
}

// RemoveMolregnoMoleculeDictionary relationship.
// Sets o.R.MolregnoMoleculeDictionary to nil.
// Removes o from all passed in related items' relationships struct.
func (o *Formulation) RemoveMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, related *MoleculeDictionary) error {
	var err error

	// Null out the FK column and persist just that column.
	queries.SetScanner(&o.Molregno, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("molregno")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.MolregnoMoleculeDictionary = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Remove one matching entry from the related side's back-reference slice
	// using swap-with-last + truncate. Note o.Molregno was nulled above, so
	// the Equal check skips entries whose Molregno is also nil — standard
	// SQLBoiler-generated removal semantics.
	for i, ri := range related.R.MolregnoFormulations {
		if queries.Equal(o.Molregno, ri.Molregno) {
			continue
		}

		ln := len(related.R.MolregnoFormulations)
		if ln > 1 && i < ln-1 {
			related.R.MolregnoFormulations[i] = related.R.MolregnoFormulations[ln-1]
		}
		related.R.MolregnoFormulations = related.R.MolregnoFormulations[:ln-1]
		break
	}
	return nil
}

// Formulations retrieves all the records using an executor.
func Formulations(mods ...qm.QueryMod) formulationQuery {
	mods = append(mods, qm.From("\"formulations\""))
	q := NewQuery(mods...)
	// Default the SELECT list to the full table unless the caller set one.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"formulations\".*"})
	}

	return formulationQuery{q}
}

// FindFormulation retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindFormulation(ctx context.Context, exec boil.ContextExecutor, formulationID int64, selectCols ...string) (*Formulation, error) {
	formulationObj := &Formulation{}

	// Build the SELECT list: "*" or a quoted subset of columns.
	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"formulations\" where \"formulation_id\"=?", sel,
	)

	q := queries.Raw(query, formulationID)

	err := q.Bind(ctx, exec, formulationObj)
	if err != nil {
		// Preserve sql.ErrNoRows unwrapped so callers can test for it directly.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from formulations")
	}

	if err = formulationObj.doAfterSelectHooks(ctx, exec); err != nil {
		return formulationObj, err
	}

	return formulationObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *Formulation) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no formulations provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(formulationColumnsWithDefault, o)

	// The built INSERT statement is cached per (columns, non-zero defaults)
	// key behind an RWMutex; read under RLock, publish under Lock below.
	key := makeCacheKey(columns, nzDefaults)
	formulationInsertCacheMut.RLock()
	cache, cached := formulationInsertCache[key]
	formulationInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			formulationAllColumns,
			formulationColumnsWithDefault,
			formulationColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(formulationType, formulationMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(formulationType, formulationMapping, returnColumns)
		if err != nil {
			return err
		}
		// %% escapes survive the first Sprintf so the output/returning
		// fragments can be spliced in by the second Sprintf below.
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"formulations\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"formulations\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	// Use QueryRow+Scan when there are RETURNING columns to read back.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into formulations")
	}

	if !cached {
		formulationInsertCacheMut.Lock()
		formulationInsertCache[key] = cache
		formulationInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the Formulation.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *Formulation) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	// Built UPDATE statements are cached per column set (see Insert for the
	// same RLock-read / Lock-publish pattern).
	key := makeCacheKey(columns, nil)
	formulationUpdateCacheMut.RLock()
	cache, cached := formulationUpdateCache[key]
	formulationUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			formulationAllColumns,
			formulationPrimaryKeyColumns,
		)

		// created_at is excluded from inferred updates; an explicit
		// whitelist can still update it.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update formulations, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"formulations\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, formulationPrimaryKeyColumns),
		)
		// PK columns are appended so the WHERE placeholders bind last.
		cache.valueMapping, err = queries.BindMapping(formulationType, formulationMapping, append(wl, formulationPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update formulations row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for formulations")
	}

	if !cached {
		formulationUpdateCacheMut.Lock()
		formulationUpdateCache[key] = cache
		formulationUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q formulationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for formulations")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for formulations")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o FormulationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	// Flatten the column map into parallel name/value slices.
	// NOTE(review): map iteration order is random, but names and values stay
	// paired, so the generated SQL is correct either way.
	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), formulationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"formulations\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, formulationPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in formulation slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all formulation")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *Formulation) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no formulations provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(formulationColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	// The key encodes every input that affects the generated SQL:
	// conflict flag, conflict columns, update/insert column kinds+names,
	// and the non-zero default set.
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	formulationUpsertCacheMut.RLock()
	cache, cached := formulationUpsertCache[key]
	formulationUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			formulationAllColumns,
			formulationColumnsWithDefault,
			formulationColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			formulationAllColumns,
			formulationPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert formulations, could not build update column list")
		}

		// Conflict target defaults to the primary key columns.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(formulationPrimaryKeyColumns))
			copy(conflict, formulationPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"formulations\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(formulationType, formulationMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(formulationType, formulationMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert formulations")
	}

	if !cached {
		formulationUpsertCacheMut.Lock()
		formulationUpsertCache[key] = cache
		formulationUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single Formulation record with an executor.
// Delete will match against the primary key column to find the record to delete.
+func (o *Formulation) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no Formulation provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), formulationPrimaryKeyMapping) + sql := "DELETE FROM \"formulations\" WHERE \"formulation_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from formulations") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for formulations") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q formulationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no formulationQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from formulations") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for formulations") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
func (o FormulationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	if len(formulationBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// Gather every row's primary key value(s) for one batched DELETE.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), formulationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"formulations\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, formulationPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from formulation slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for formulations")
	}

	if len(formulationAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *Formulation) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindFormulation(ctx, exec, o.FormulationID)
	if err != nil {
		return err
	}

	// Overwrite the receiver's fields in place with the fresh row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *FormulationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	// Gather every row's primary key value(s) for one batched SELECT.
	slice := FormulationSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), formulationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"formulations\".* FROM \"formulations\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, formulationPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in FormulationSlice")
	}

	// Replace the caller's slice wholesale with the refetched rows.
	*o = slice

	return nil
}

// FormulationExists checks if the Formulation row exists.
func FormulationExists(ctx context.Context, exec boil.ContextExecutor, formulationID int64) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"formulations\" where \"formulation_id\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, formulationID)
	}
	row := exec.QueryRowContext(ctx, sql, formulationID)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if formulations exists")
	}

	return exists, nil
}
diff --git a/models/frac_classification.go b/models/frac_classification.go
new file mode 100644
index 0000000..b60fbe7
--- /dev/null
+++ b/models/frac_classification.go
@@ -0,0 +1,1136 @@
+// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
+// This file is meant to be re-generated in place and/or deleted at any time.
package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// FracClassification is an object representing the database table.
// Nullable columns use null.String; non-null columns use plain Go types.
type FracClassification struct {
	FracClassID       int64       `boil:"frac_class_id" json:"frac_class_id" toml:"frac_class_id" yaml:"frac_class_id"`
	ActiveIngredient  string      `boil:"active_ingredient" json:"active_ingredient" toml:"active_ingredient" yaml:"active_ingredient"`
	Level1            string      `boil:"level1" json:"level1" toml:"level1" yaml:"level1"`
	Level1Description string      `boil:"level1_description" json:"level1_description" toml:"level1_description" yaml:"level1_description"`
	Level2            string      `boil:"level2" json:"level2" toml:"level2" yaml:"level2"`
	Level2Description null.String `boil:"level2_description" json:"level2_description,omitempty" toml:"level2_description" yaml:"level2_description,omitempty"`
	Level3            string      `boil:"level3" json:"level3" toml:"level3" yaml:"level3"`
	Level3Description null.String `boil:"level3_description" json:"level3_description,omitempty" toml:"level3_description" yaml:"level3_description,omitempty"`
	Level4            string      `boil:"level4" json:"level4" toml:"level4" yaml:"level4"`
	Level4Description null.String `boil:"level4_description" json:"level4_description,omitempty" toml:"level4_description" yaml:"level4_description,omitempty"`
	Level5            string      `boil:"level5" json:"level5" toml:"level5" yaml:"level5"`
	FracCode          string      `boil:"frac_code" json:"frac_code" toml:"frac_code" yaml:"frac_code"`

	// R holds eager-loaded relationships; L holds the load methods.
	R *fracClassificationR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L fracClassificationL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// FracClassificationColumns maps struct fields to bare column names.
var FracClassificationColumns = struct {
	FracClassID       string
	ActiveIngredient  string
	Level1            string
	Level1Description string
	Level2            string
	Level2Description string
	Level3            string
	Level3Description string
	Level4            string
	Level4Description string
	Level5            string
	FracCode          string
}{
	FracClassID:       "frac_class_id",
	ActiveIngredient:  "active_ingredient",
	Level1:            "level1",
	Level1Description: "level1_description",
	Level2:            "level2",
	Level2Description: "level2_description",
	Level3:            "level3",
	Level3Description: "level3_description",
	Level4:            "level4",
	Level4Description: "level4_description",
	Level5:            "level5",
	FracCode:          "frac_code",
}

// FracClassificationTableColumns maps struct fields to table-qualified column names.
var FracClassificationTableColumns = struct {
	FracClassID       string
	ActiveIngredient  string
	Level1            string
	Level1Description string
	Level2            string
	Level2Description string
	Level3            string
	Level3Description string
	Level4            string
	Level4Description string
	Level5            string
	FracCode          string
}{
	FracClassID:       "frac_classification.frac_class_id",
	ActiveIngredient:  "frac_classification.active_ingredient",
	Level1:            "frac_classification.level1",
	Level1Description: "frac_classification.level1_description",
	Level2:            "frac_classification.level2",
	Level2Description: "frac_classification.level2_description",
	Level3:            "frac_classification.level3",
	Level3Description: "frac_classification.level3_description",
	Level4:            "frac_classification.level4",
	Level4Description: "frac_classification.level4_description",
	Level5:            "frac_classification.level5",
	FracCode:          "frac_classification.frac_code",
}

// Generated where

// FracClassificationWhere provides typed where-clause helpers per column.
var FracClassificationWhere = struct {
	FracClassID       whereHelperint64
	ActiveIngredient  whereHelperstring
	Level1            whereHelperstring
	Level1Description whereHelperstring
	Level2            whereHelperstring
	Level2Description whereHelpernull_String
	Level3            whereHelperstring
	Level3Description whereHelpernull_String
	Level4            whereHelperstring
	Level4Description whereHelpernull_String
	Level5            whereHelperstring
	FracCode          whereHelperstring
}{
	FracClassID:       whereHelperint64{field: "\"frac_classification\".\"frac_class_id\""},
	ActiveIngredient:  whereHelperstring{field: "\"frac_classification\".\"active_ingredient\""},
	Level1:            whereHelperstring{field: "\"frac_classification\".\"level1\""},
	Level1Description: whereHelperstring{field: "\"frac_classification\".\"level1_description\""},
	Level2:            whereHelperstring{field: "\"frac_classification\".\"level2\""},
	Level2Description: whereHelpernull_String{field: "\"frac_classification\".\"level2_description\""},
	Level3:            whereHelperstring{field: "\"frac_classification\".\"level3\""},
	Level3Description: whereHelpernull_String{field: "\"frac_classification\".\"level3_description\""},
	Level4:            whereHelperstring{field: "\"frac_classification\".\"level4\""},
	Level4Description: whereHelpernull_String{field: "\"frac_classification\".\"level4_description\""},
	Level5:            whereHelperstring{field: "\"frac_classification\".\"level5\""},
	FracCode:          whereHelperstring{field: "\"frac_classification\".\"frac_code\""},
}

// FracClassificationRels is where relationship names are stored.
var FracClassificationRels = struct {
	FracClassMoleculeFracClassifications string
}{
	FracClassMoleculeFracClassifications: "FracClassMoleculeFracClassifications",
}

// fracClassificationR is where relationships are stored.
+type fracClassificationR struct { + FracClassMoleculeFracClassifications MoleculeFracClassificationSlice `boil:"FracClassMoleculeFracClassifications" json:"FracClassMoleculeFracClassifications" toml:"FracClassMoleculeFracClassifications" yaml:"FracClassMoleculeFracClassifications"` +} + +// NewStruct creates a new relationship struct +func (*fracClassificationR) NewStruct() *fracClassificationR { + return &fracClassificationR{} +} + +func (r *fracClassificationR) GetFracClassMoleculeFracClassifications() MoleculeFracClassificationSlice { + if r == nil { + return nil + } + return r.FracClassMoleculeFracClassifications +} + +// fracClassificationL is where Load methods for each relationship are stored. +type fracClassificationL struct{} + +var ( + fracClassificationAllColumns = []string{"frac_class_id", "active_ingredient", "level1", "level1_description", "level2", "level2_description", "level3", "level3_description", "level4", "level4_description", "level5", "frac_code"} + fracClassificationColumnsWithoutDefault = []string{"frac_class_id", "active_ingredient", "level1", "level1_description", "level2", "level3", "level4", "level5", "frac_code"} + fracClassificationColumnsWithDefault = []string{"level2_description", "level3_description", "level4_description"} + fracClassificationPrimaryKeyColumns = []string{"frac_class_id"} + fracClassificationGeneratedColumns = []string{} +) + +type ( + // FracClassificationSlice is an alias for a slice of pointers to FracClassification. + // This should almost always be used instead of []FracClassification. 
+ FracClassificationSlice []*FracClassification + // FracClassificationHook is the signature for custom FracClassification hook methods + FracClassificationHook func(context.Context, boil.ContextExecutor, *FracClassification) error + + fracClassificationQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + fracClassificationType = reflect.TypeOf(&FracClassification{}) + fracClassificationMapping = queries.MakeStructMapping(fracClassificationType) + fracClassificationPrimaryKeyMapping, _ = queries.BindMapping(fracClassificationType, fracClassificationMapping, fracClassificationPrimaryKeyColumns) + fracClassificationInsertCacheMut sync.RWMutex + fracClassificationInsertCache = make(map[string]insertCache) + fracClassificationUpdateCacheMut sync.RWMutex + fracClassificationUpdateCache = make(map[string]updateCache) + fracClassificationUpsertCacheMut sync.RWMutex + fracClassificationUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var fracClassificationAfterSelectHooks []FracClassificationHook + +var fracClassificationBeforeInsertHooks []FracClassificationHook +var fracClassificationAfterInsertHooks []FracClassificationHook + +var fracClassificationBeforeUpdateHooks []FracClassificationHook +var fracClassificationAfterUpdateHooks []FracClassificationHook + +var fracClassificationBeforeDeleteHooks []FracClassificationHook +var fracClassificationAfterDeleteHooks []FracClassificationHook + +var fracClassificationBeforeUpsertHooks []FracClassificationHook +var fracClassificationAfterUpsertHooks []FracClassificationHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *FracClassification) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range fracClassificationAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *FracClassification) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range fracClassificationBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *FracClassification) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range fracClassificationAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *FracClassification) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range fracClassificationBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *FracClassification) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range fracClassificationAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *FracClassification) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range fracClassificationBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *FracClassification) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range fracClassificationAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *FracClassification) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range fracClassificationBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *FracClassification) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range fracClassificationAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddFracClassificationHook registers your hook function for all future operations. 
+func AddFracClassificationHook(hookPoint boil.HookPoint, fracClassificationHook FracClassificationHook) { + switch hookPoint { + case boil.AfterSelectHook: + fracClassificationAfterSelectHooks = append(fracClassificationAfterSelectHooks, fracClassificationHook) + case boil.BeforeInsertHook: + fracClassificationBeforeInsertHooks = append(fracClassificationBeforeInsertHooks, fracClassificationHook) + case boil.AfterInsertHook: + fracClassificationAfterInsertHooks = append(fracClassificationAfterInsertHooks, fracClassificationHook) + case boil.BeforeUpdateHook: + fracClassificationBeforeUpdateHooks = append(fracClassificationBeforeUpdateHooks, fracClassificationHook) + case boil.AfterUpdateHook: + fracClassificationAfterUpdateHooks = append(fracClassificationAfterUpdateHooks, fracClassificationHook) + case boil.BeforeDeleteHook: + fracClassificationBeforeDeleteHooks = append(fracClassificationBeforeDeleteHooks, fracClassificationHook) + case boil.AfterDeleteHook: + fracClassificationAfterDeleteHooks = append(fracClassificationAfterDeleteHooks, fracClassificationHook) + case boil.BeforeUpsertHook: + fracClassificationBeforeUpsertHooks = append(fracClassificationBeforeUpsertHooks, fracClassificationHook) + case boil.AfterUpsertHook: + fracClassificationAfterUpsertHooks = append(fracClassificationAfterUpsertHooks, fracClassificationHook) + } +} + +// One returns a single fracClassification record from the query. +func (q fracClassificationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*FracClassification, error) { + o := &FracClassification{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for frac_classification") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all FracClassification records from the query. 
+func (q fracClassificationQuery) All(ctx context.Context, exec boil.ContextExecutor) (FracClassificationSlice, error) { + var o []*FracClassification + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to FracClassification slice") + } + + if len(fracClassificationAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all FracClassification records in the query. +func (q fracClassificationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count frac_classification rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q fracClassificationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if frac_classification exists") + } + + return count > 0, nil +} + +// FracClassMoleculeFracClassifications retrieves all the molecule_frac_classification's MoleculeFracClassifications with an executor via frac_class_id column. +func (o *FracClassification) FracClassMoleculeFracClassifications(mods ...qm.QueryMod) moleculeFracClassificationQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"molecule_frac_classification\".\"frac_class_id\"=?", o.FracClassID), + ) + + return MoleculeFracClassifications(queryMods...) 
+} + +// LoadFracClassMoleculeFracClassifications allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (fracClassificationL) LoadFracClassMoleculeFracClassifications(ctx context.Context, e boil.ContextExecutor, singular bool, maybeFracClassification interface{}, mods queries.Applicator) error { + var slice []*FracClassification + var object *FracClassification + + if singular { + object = maybeFracClassification.(*FracClassification) + } else { + slice = *maybeFracClassification.(*[]*FracClassification) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &fracClassificationR{} + } + args = append(args, object.FracClassID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &fracClassificationR{} + } + + for _, a := range args { + if a == obj.FracClassID { + continue Outer + } + } + + args = append(args, obj.FracClassID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`molecule_frac_classification`), + qm.WhereIn(`molecule_frac_classification.frac_class_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load molecule_frac_classification") + } + + var resultSlice []*MoleculeFracClassification + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice molecule_frac_classification") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on molecule_frac_classification") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_frac_classification") + } + + if len(moleculeFracClassificationAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := 
obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.FracClassMoleculeFracClassifications = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &moleculeFracClassificationR{} + } + foreign.R.FracClass = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.FracClassID == foreign.FracClassID { + local.R.FracClassMoleculeFracClassifications = append(local.R.FracClassMoleculeFracClassifications, foreign) + if foreign.R == nil { + foreign.R = &moleculeFracClassificationR{} + } + foreign.R.FracClass = local + break + } + } + } + + return nil +} + +// AddFracClassMoleculeFracClassifications adds the given related objects to the existing relationships +// of the frac_classification, optionally inserting them as new records. +// Appends related to o.R.FracClassMoleculeFracClassifications. +// Sets related.R.FracClass appropriately. +func (o *FracClassification) AddFracClassMoleculeFracClassifications(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeFracClassification) error { + var err error + for _, rel := range related { + if insert { + rel.FracClassID = o.FracClassID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_frac_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"frac_class_id"}), + strmangle.WhereClause("\"", "\"", 0, moleculeFracClassificationPrimaryKeyColumns), + ) + values := []interface{}{o.FracClassID, rel.MolFracID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.FracClassID = 
o.FracClassID + } + } + + if o.R == nil { + o.R = &fracClassificationR{ + FracClassMoleculeFracClassifications: related, + } + } else { + o.R.FracClassMoleculeFracClassifications = append(o.R.FracClassMoleculeFracClassifications, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &moleculeFracClassificationR{ + FracClass: o, + } + } else { + rel.R.FracClass = o + } + } + return nil +} + +// FracClassifications retrieves all the records using an executor. +func FracClassifications(mods ...qm.QueryMod) fracClassificationQuery { + mods = append(mods, qm.From("\"frac_classification\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"frac_classification\".*"}) + } + + return fracClassificationQuery{q} +} + +// FindFracClassification retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindFracClassification(ctx context.Context, exec boil.ContextExecutor, fracClassID int64, selectCols ...string) (*FracClassification, error) { + fracClassificationObj := &FracClassification{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"frac_classification\" where \"frac_class_id\"=?", sel, + ) + + q := queries.Raw(query, fracClassID) + + err := q.Bind(ctx, exec, fracClassificationObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from frac_classification") + } + + if err = fracClassificationObj.doAfterSelectHooks(ctx, exec); err != nil { + return fracClassificationObj, err + } + + return fracClassificationObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *FracClassification) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no frac_classification provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(fracClassificationColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + fracClassificationInsertCacheMut.RLock() + cache, cached := fracClassificationInsertCache[key] + fracClassificationInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + fracClassificationAllColumns, + fracClassificationColumnsWithDefault, + fracClassificationColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(fracClassificationType, fracClassificationMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(fracClassificationType, fracClassificationMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"frac_classification\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"frac_classification\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, 
cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into frac_classification") + } + + if !cached { + fracClassificationInsertCacheMut.Lock() + fracClassificationInsertCache[key] = cache + fracClassificationInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the FracClassification. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *FracClassification) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + fracClassificationUpdateCacheMut.RLock() + cache, cached := fracClassificationUpdateCache[key] + fracClassificationUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + fracClassificationAllColumns, + fracClassificationPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update frac_classification, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"frac_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, fracClassificationPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(fracClassificationType, fracClassificationMapping, append(wl, fracClassificationPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, 
cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update frac_classification row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for frac_classification") + } + + if !cached { + fracClassificationUpdateCacheMut.Lock() + fracClassificationUpdateCache[key] = cache + fracClassificationUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q fracClassificationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for frac_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for frac_classification") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o FracClassificationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), fracClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"frac_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, fracClassificationPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in fracClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all fracClassification") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *FracClassification) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no frac_classification provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(fracClassificationColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := 
buf.String() + strmangle.PutBuffer(buf) + + fracClassificationUpsertCacheMut.RLock() + cache, cached := fracClassificationUpsertCache[key] + fracClassificationUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + fracClassificationAllColumns, + fracClassificationColumnsWithDefault, + fracClassificationColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + fracClassificationAllColumns, + fracClassificationPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert frac_classification, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(fracClassificationPrimaryKeyColumns)) + copy(conflict, fracClassificationPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"frac_classification\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(fracClassificationType, fracClassificationMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(fracClassificationType, fracClassificationMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert frac_classification") + } + + if !cached { + fracClassificationUpsertCacheMut.Lock() + fracClassificationUpsertCache[key] = cache + fracClassificationUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single FracClassification record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *FracClassification) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no FracClassification provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), fracClassificationPrimaryKeyMapping) + sql := "DELETE FROM \"frac_classification\" WHERE \"frac_class_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from frac_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for frac_classification") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q fracClassificationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no fracClassificationQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from frac_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for frac_classification") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o FracClassificationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(fracClassificationBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), fracClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"frac_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, fracClassificationPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from fracClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for frac_classification") + } + + if len(fracClassificationAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *FracClassification) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindFracClassification(ctx, exec, o.FracClassID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *FracClassificationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := FracClassificationSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), fracClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"frac_classification\".* FROM \"frac_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, fracClassificationPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in FracClassificationSlice") + } + + *o = slice + + return nil +} + +// FracClassificationExists checks if the FracClassification row exists. 
+func FracClassificationExists(ctx context.Context, exec boil.ContextExecutor, fracClassID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"frac_classification\" where \"frac_class_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, fracClassID) + } + row := exec.QueryRowContext(ctx, sql, fracClassID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if frac_classification exists") + } + + return exists, nil +} diff --git a/models/genbank.go b/models/genbank.go new file mode 100644 index 0000000..1aaa52e --- /dev/null +++ b/models/genbank.go @@ -0,0 +1,1348 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Genbank is an object representing the database table. 
+type Genbank struct { + Accession null.String `boil:"accession" json:"accession,omitempty" toml:"accession" yaml:"accession,omitempty"` + Seqhash string `boil:"seqhash" json:"seqhash" toml:"seqhash" yaml:"seqhash"` + + R *genbankR `boil:"-" json:"-" toml:"-" yaml:"-"` + L genbankL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var GenbankColumns = struct { + Accession string + Seqhash string +}{ + Accession: "accession", + Seqhash: "seqhash", +} + +var GenbankTableColumns = struct { + Accession string + Seqhash string +}{ + Accession: "genbank.accession", + Seqhash: "genbank.seqhash", +} + +// Generated where + +var GenbankWhere = struct { + Accession whereHelpernull_String + Seqhash whereHelperstring +}{ + Accession: whereHelpernull_String{field: "\"genbank\".\"accession\""}, + Seqhash: whereHelperstring{field: "\"genbank\".\"seqhash\""}, +} + +// GenbankRels is where relationship names are stored. +var GenbankRels = struct { + GenbankSeqhash string + Seqhashes string +}{ + GenbankSeqhash: "GenbankSeqhash", + Seqhashes: "Seqhashes", +} + +// genbankR is where relationships are stored. +type genbankR struct { + GenbankSeqhash *Seqhash `boil:"GenbankSeqhash" json:"GenbankSeqhash" toml:"GenbankSeqhash" yaml:"GenbankSeqhash"` + Seqhashes SeqhashSlice `boil:"Seqhashes" json:"Seqhashes" toml:"Seqhashes" yaml:"Seqhashes"` +} + +// NewStruct creates a new relationship struct +func (*genbankR) NewStruct() *genbankR { + return &genbankR{} +} + +func (r *genbankR) GetGenbankSeqhash() *Seqhash { + if r == nil { + return nil + } + return r.GenbankSeqhash +} + +func (r *genbankR) GetSeqhashes() SeqhashSlice { + if r == nil { + return nil + } + return r.Seqhashes +} + +// genbankL is where Load methods for each relationship are stored. 
+type genbankL struct{} + +var ( + genbankAllColumns = []string{"accession", "seqhash"} + genbankColumnsWithoutDefault = []string{"seqhash"} + genbankColumnsWithDefault = []string{"accession"} + genbankPrimaryKeyColumns = []string{"accession"} + genbankGeneratedColumns = []string{} +) + +type ( + // GenbankSlice is an alias for a slice of pointers to Genbank. + // This should almost always be used instead of []Genbank. + GenbankSlice []*Genbank + // GenbankHook is the signature for custom Genbank hook methods + GenbankHook func(context.Context, boil.ContextExecutor, *Genbank) error + + genbankQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + genbankType = reflect.TypeOf(&Genbank{}) + genbankMapping = queries.MakeStructMapping(genbankType) + genbankPrimaryKeyMapping, _ = queries.BindMapping(genbankType, genbankMapping, genbankPrimaryKeyColumns) + genbankInsertCacheMut sync.RWMutex + genbankInsertCache = make(map[string]insertCache) + genbankUpdateCacheMut sync.RWMutex + genbankUpdateCache = make(map[string]updateCache) + genbankUpsertCacheMut sync.RWMutex + genbankUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var genbankAfterSelectHooks []GenbankHook + +var genbankBeforeInsertHooks []GenbankHook +var genbankAfterInsertHooks []GenbankHook + +var genbankBeforeUpdateHooks []GenbankHook +var genbankAfterUpdateHooks []GenbankHook + +var genbankBeforeDeleteHooks []GenbankHook +var genbankAfterDeleteHooks []GenbankHook + +var genbankBeforeUpsertHooks []GenbankHook +var genbankAfterUpsertHooks []GenbankHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *Genbank) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range genbankAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *Genbank) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range genbankBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Genbank) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range genbankAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *Genbank) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range genbankBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Genbank) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range genbankAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *Genbank) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range genbankBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *Genbank) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range genbankAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Genbank) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range genbankBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *Genbank) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range genbankAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddGenbankHook registers your hook function for all future operations. 
+func AddGenbankHook(hookPoint boil.HookPoint, genbankHook GenbankHook) { + switch hookPoint { + case boil.AfterSelectHook: + genbankAfterSelectHooks = append(genbankAfterSelectHooks, genbankHook) + case boil.BeforeInsertHook: + genbankBeforeInsertHooks = append(genbankBeforeInsertHooks, genbankHook) + case boil.AfterInsertHook: + genbankAfterInsertHooks = append(genbankAfterInsertHooks, genbankHook) + case boil.BeforeUpdateHook: + genbankBeforeUpdateHooks = append(genbankBeforeUpdateHooks, genbankHook) + case boil.AfterUpdateHook: + genbankAfterUpdateHooks = append(genbankAfterUpdateHooks, genbankHook) + case boil.BeforeDeleteHook: + genbankBeforeDeleteHooks = append(genbankBeforeDeleteHooks, genbankHook) + case boil.AfterDeleteHook: + genbankAfterDeleteHooks = append(genbankAfterDeleteHooks, genbankHook) + case boil.BeforeUpsertHook: + genbankBeforeUpsertHooks = append(genbankBeforeUpsertHooks, genbankHook) + case boil.AfterUpsertHook: + genbankAfterUpsertHooks = append(genbankAfterUpsertHooks, genbankHook) + } +} + +// One returns a single genbank record from the query. +func (q genbankQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Genbank, error) { + o := &Genbank{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for genbank") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Genbank records from the query. 
+func (q genbankQuery) All(ctx context.Context, exec boil.ContextExecutor) (GenbankSlice, error) { + var o []*Genbank + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to Genbank slice") + } + + if len(genbankAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Genbank records in the query. +func (q genbankQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count genbank rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q genbankQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if genbank exists") + } + + return count > 0, nil +} + +// GenbankSeqhash pointed to by the foreign key. +func (o *Genbank) GenbankSeqhash(mods ...qm.QueryMod) seqhashQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"seqhash\" = ?", o.Seqhash), + } + + queryMods = append(queryMods, mods...) + + return Seqhashes(queryMods...) +} + +// Seqhashes retrieves all the seqhash's Seqhashes with an executor. +func (o *Genbank) Seqhashes(mods ...qm.QueryMod) seqhashQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) 
+ } + + queryMods = append(queryMods, + qm.InnerJoin("\"genbank_features\" on \"seqhash\".\"seqhash\" = \"genbank_features\".\"seqhash\""), + qm.Where("\"genbank_features\".\"parent\"=?", o.Accession), + ) + + return Seqhashes(queryMods...) +} + +// LoadGenbankSeqhash allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (genbankL) LoadGenbankSeqhash(ctx context.Context, e boil.ContextExecutor, singular bool, maybeGenbank interface{}, mods queries.Applicator) error { + var slice []*Genbank + var object *Genbank + + if singular { + object = maybeGenbank.(*Genbank) + } else { + slice = *maybeGenbank.(*[]*Genbank) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &genbankR{} + } + args = append(args, object.Seqhash) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &genbankR{} + } + + for _, a := range args { + if a == obj.Seqhash { + continue Outer + } + } + + args = append(args, obj.Seqhash) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`seqhash`), + qm.WhereIn(`seqhash.seqhash in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Seqhash") + } + + var resultSlice []*Seqhash + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Seqhash") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for seqhash") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for seqhash") + } + + if len(genbankAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + 
return nil + } + + if singular { + foreign := resultSlice[0] + object.R.GenbankSeqhash = foreign + if foreign.R == nil { + foreign.R = &seqhashR{} + } + foreign.R.Genbanks = append(foreign.R.Genbanks, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Seqhash == foreign.Seqhash { + local.R.GenbankSeqhash = foreign + if foreign.R == nil { + foreign.R = &seqhashR{} + } + foreign.R.Genbanks = append(foreign.R.Genbanks, local) + break + } + } + } + + return nil +} + +// LoadSeqhashes allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (genbankL) LoadSeqhashes(ctx context.Context, e boil.ContextExecutor, singular bool, maybeGenbank interface{}, mods queries.Applicator) error { + var slice []*Genbank + var object *Genbank + + if singular { + object = maybeGenbank.(*Genbank) + } else { + slice = *maybeGenbank.(*[]*Genbank) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &genbankR{} + } + args = append(args, object.Accession) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &genbankR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Accession) { + continue Outer + } + } + + args = append(args, obj.Accession) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.Select("\"seqhash\".\"seqhash\", \"seqhash\".\"sequence\", \"seqhash\".\"circular\", \"seqhash\".\"doublestranded\", \"seqhash\".\"seqhashtype\", \"seqhash\".\"translation\", \"a\".\"parent\""), + qm.From("\"seqhash\""), + qm.InnerJoin("\"genbank_features\" as \"a\" on \"seqhash\".\"seqhash\" = \"a\".\"seqhash\""), + qm.WhereIn("\"a\".\"parent\" in ?", args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load seqhash") + } + + var resultSlice []*Seqhash + + var 
localJoinCols []string + for results.Next() { + one := new(Seqhash) + var localJoinCol string + + err = results.Scan(&one.Seqhash, &one.Sequence, &one.Circular, &one.Doublestranded, &one.Seqhashtype, &one.Translation, &localJoinCol) + if err != nil { + return errors.Wrap(err, "failed to scan eager loaded results for seqhash") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "failed to plebian-bind eager loaded slice seqhash") + } + + resultSlice = append(resultSlice, one) + localJoinCols = append(localJoinCols, localJoinCol) + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on seqhash") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for seqhash") + } + + if len(seqhashAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.Seqhashes = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &seqhashR{} + } + foreign.R.ParentGenbanks = append(foreign.R.ParentGenbanks, object) + } + return nil + } + + for i, foreign := range resultSlice { + localJoinCol := localJoinCols[i] + for _, local := range slice { + if queries.Equal(local.Accession, localJoinCol) { + local.R.Seqhashes = append(local.R.Seqhashes, foreign) + if foreign.R == nil { + foreign.R = &seqhashR{} + } + foreign.R.ParentGenbanks = append(foreign.R.ParentGenbanks, local) + break + } + } + } + + return nil +} + +// SetGenbankSeqhash of the genbank to the related item. +// Sets o.R.GenbankSeqhash to related. +// Adds o to related.R.Genbanks. 
+func (o *Genbank) SetGenbankSeqhash(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Seqhash) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"genbank\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"seqhash"}), + strmangle.WhereClause("\"", "\"", 0, genbankPrimaryKeyColumns), + ) + values := []interface{}{related.Seqhash, o.Accession} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.Seqhash = related.Seqhash + if o.R == nil { + o.R = &genbankR{ + GenbankSeqhash: related, + } + } else { + o.R.GenbankSeqhash = related + } + + if related.R == nil { + related.R = &seqhashR{ + Genbanks: GenbankSlice{o}, + } + } else { + related.R.Genbanks = append(related.R.Genbanks, o) + } + + return nil +} + +// AddSeqhashes adds the given related objects to the existing relationships +// of the genbank, optionally inserting them as new records. +// Appends related to o.R.Seqhashes. +// Sets related.R.ParentGenbanks appropriately. 
+func (o *Genbank) AddSeqhashes(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Seqhash) error { + var err error + for _, rel := range related { + if insert { + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + } + + for _, rel := range related { + query := "insert into \"genbank_features\" (\"parent\", \"seqhash\") values (?, ?)" + values := []interface{}{o.Accession, rel.Seqhash} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err = exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to insert into join table") + } + } + if o.R == nil { + o.R = &genbankR{ + Seqhashes: related, + } + } else { + o.R.Seqhashes = append(o.R.Seqhashes, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &seqhashR{ + ParentGenbanks: GenbankSlice{o}, + } + } else { + rel.R.ParentGenbanks = append(rel.R.ParentGenbanks, o) + } + } + return nil +} + +// SetSeqhashes removes all previously related items of the +// genbank replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.ParentGenbanks's Seqhashes accordingly. +// Replaces o.R.Seqhashes with related. +// Sets related.R.ParentGenbanks's Seqhashes accordingly. +func (o *Genbank) SetSeqhashes(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Seqhash) error { + query := "delete from \"genbank_features\" where \"parent\" = ?" + values := []interface{}{o.Accession} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) 
+ if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + removeSeqhashesFromParentGenbanksSlice(o, related) + if o.R != nil { + o.R.Seqhashes = nil + } + + return o.AddSeqhashes(ctx, exec, insert, related...) +} + +// RemoveSeqhashes relationships from objects passed in. +// Removes related items from R.Seqhashes (uses pointer comparison, removal does not keep order) +// Sets related.R.ParentGenbanks. +func (o *Genbank) RemoveSeqhashes(ctx context.Context, exec boil.ContextExecutor, related ...*Seqhash) error { + if len(related) == 0 { + return nil + } + + var err error + query := fmt.Sprintf( + "delete from \"genbank_features\" where \"parent\" = ? and \"seqhash\" in (%s)", + strmangle.Placeholders(dialect.UseIndexPlaceholders, len(related), 2, 1), + ) + values := []interface{}{o.Accession} + for _, rel := range related { + values = append(values, rel.Seqhash) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err = exec.ExecContext(ctx, query, values...) 
+ if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + removeSeqhashesFromParentGenbanksSlice(o, related) + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.Seqhashes { + if rel != ri { + continue + } + + ln := len(o.R.Seqhashes) + if ln > 1 && i < ln-1 { + o.R.Seqhashes[i] = o.R.Seqhashes[ln-1] + } + o.R.Seqhashes = o.R.Seqhashes[:ln-1] + break + } + } + + return nil +} + +func removeSeqhashesFromParentGenbanksSlice(o *Genbank, related []*Seqhash) { + for _, rel := range related { + if rel.R == nil { + continue + } + for i, ri := range rel.R.ParentGenbanks { + if !queries.Equal(o.Accession, ri.Accession) { + continue + } + + ln := len(rel.R.ParentGenbanks) + if ln > 1 && i < ln-1 { + rel.R.ParentGenbanks[i] = rel.R.ParentGenbanks[ln-1] + } + rel.R.ParentGenbanks = rel.R.ParentGenbanks[:ln-1] + break + } + } +} + +// Genbanks retrieves all the records using an executor. +func Genbanks(mods ...qm.QueryMod) genbankQuery { + mods = append(mods, qm.From("\"genbank\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"genbank\".*"}) + } + + return genbankQuery{q} +} + +// FindGenbank retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindGenbank(ctx context.Context, exec boil.ContextExecutor, accession null.String, selectCols ...string) (*Genbank, error) { + genbankObj := &Genbank{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"genbank\" where \"accession\"=?", sel, + ) + + q := queries.Raw(query, accession) + + err := q.Bind(ctx, exec, genbankObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from genbank") + } + + if err = genbankObj.doAfterSelectHooks(ctx, exec); err != nil { + return genbankObj, err + } + + return genbankObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *Genbank) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no genbank provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(genbankColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + genbankInsertCacheMut.RLock() + cache, cached := genbankInsertCache[key] + genbankInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + genbankAllColumns, + genbankColumnsWithDefault, + genbankColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(genbankType, genbankMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(genbankType, genbankMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"genbank\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), 
strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"genbank\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into genbank") + } + + if !cached { + genbankInsertCacheMut.Lock() + genbankInsertCache[key] = cache + genbankInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the Genbank. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *Genbank) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + genbankUpdateCacheMut.RLock() + cache, cached := genbankUpdateCache[key] + genbankUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + genbankAllColumns, + genbankPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update genbank, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"genbank\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, genbankPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(genbankType, genbankMapping, append(wl, genbankPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update genbank row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for genbank") + } + + if !cached { + genbankUpdateCacheMut.Lock() + genbankUpdateCache[key] = cache + genbankUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q genbankQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for genbank") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for genbank") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o GenbankSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), genbankPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"genbank\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, genbankPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in genbank slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all genbank") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Genbank) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no genbank provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(genbankColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + genbankUpsertCacheMut.RLock() + cache, cached := genbankUpsertCache[key] + genbankUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + genbankAllColumns, + genbankColumnsWithDefault, + genbankColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + genbankAllColumns, + genbankPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return 
errors.New("models: unable to upsert genbank, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(genbankPrimaryKeyColumns)) + copy(conflict, genbankPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"genbank\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(genbankType, genbankMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(genbankType, genbankMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert genbank") + } + + if !cached { + genbankUpsertCacheMut.Lock() + genbankUpsertCache[key] = cache + genbankUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Genbank record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *Genbank) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no Genbank provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), genbankPrimaryKeyMapping) + sql := "DELETE FROM \"genbank\" WHERE \"accession\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from genbank") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for genbank") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q genbankQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no genbankQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from genbank") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for genbank") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o GenbankSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(genbankBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), genbankPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"genbank\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, genbankPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from genbank slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for genbank") + } + + if len(genbankAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *Genbank) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindGenbank(ctx, exec, o.Accession) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *GenbankSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := GenbankSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), genbankPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"genbank\".* FROM \"genbank\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, genbankPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in GenbankSlice") + } + + *o = slice + + return nil +} + +// GenbankExists checks if the Genbank row exists. +func GenbankExists(ctx context.Context, exec boil.ContextExecutor, accession null.String) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"genbank\" where \"accession\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, accession) + } + row := exec.QueryRowContext(ctx, sql, accession) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if genbank exists") + } + + return exists, nil +} diff --git a/models/go_classification.go b/models/go_classification.go new file mode 100644 index 0000000..e6c9f58 --- /dev/null +++ b/models/go_classification.go @@ -0,0 +1,1094 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// GoClassification is an object representing the database table. +type GoClassification struct { + GoID string `boil:"go_id" json:"go_id" toml:"go_id" yaml:"go_id"` + ParentGoID null.String `boil:"parent_go_id" json:"parent_go_id,omitempty" toml:"parent_go_id" yaml:"parent_go_id,omitempty"` + PrefName null.String `boil:"pref_name" json:"pref_name,omitempty" toml:"pref_name" yaml:"pref_name,omitempty"` + ClassLevel null.Int16 `boil:"class_level" json:"class_level,omitempty" toml:"class_level" yaml:"class_level,omitempty"` + Aspect null.String `boil:"aspect" json:"aspect,omitempty" toml:"aspect" yaml:"aspect,omitempty"` + Path null.String `boil:"path" json:"path,omitempty" toml:"path" yaml:"path,omitempty"` + + R *goClassificationR `boil:"-" json:"-" toml:"-" yaml:"-"` + L goClassificationL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var GoClassificationColumns = struct { + GoID string + ParentGoID string + PrefName string + ClassLevel string + Aspect string + Path string +}{ + GoID: "go_id", + ParentGoID: "parent_go_id", + PrefName: "pref_name", + ClassLevel: "class_level", + Aspect: "aspect", + Path: "path", +} + +var GoClassificationTableColumns = struct { + GoID string + ParentGoID string + PrefName string + ClassLevel string + Aspect string + Path string +}{ + GoID: "go_classification.go_id", + ParentGoID: "go_classification.parent_go_id", + PrefName: "go_classification.pref_name", + ClassLevel: "go_classification.class_level", + Aspect: "go_classification.aspect", + Path: "go_classification.path", +} + +// Generated where + 
+var GoClassificationWhere = struct { + GoID whereHelperstring + ParentGoID whereHelpernull_String + PrefName whereHelpernull_String + ClassLevel whereHelpernull_Int16 + Aspect whereHelpernull_String + Path whereHelpernull_String +}{ + GoID: whereHelperstring{field: "\"go_classification\".\"go_id\""}, + ParentGoID: whereHelpernull_String{field: "\"go_classification\".\"parent_go_id\""}, + PrefName: whereHelpernull_String{field: "\"go_classification\".\"pref_name\""}, + ClassLevel: whereHelpernull_Int16{field: "\"go_classification\".\"class_level\""}, + Aspect: whereHelpernull_String{field: "\"go_classification\".\"aspect\""}, + Path: whereHelpernull_String{field: "\"go_classification\".\"path\""}, +} + +// GoClassificationRels is where relationship names are stored. +var GoClassificationRels = struct { + GoComponentGos string +}{ + GoComponentGos: "GoComponentGos", +} + +// goClassificationR is where relationships are stored. +type goClassificationR struct { + GoComponentGos ComponentGoSlice `boil:"GoComponentGos" json:"GoComponentGos" toml:"GoComponentGos" yaml:"GoComponentGos"` +} + +// NewStruct creates a new relationship struct +func (*goClassificationR) NewStruct() *goClassificationR { + return &goClassificationR{} +} + +func (r *goClassificationR) GetGoComponentGos() ComponentGoSlice { + if r == nil { + return nil + } + return r.GoComponentGos +} + +// goClassificationL is where Load methods for each relationship are stored. +type goClassificationL struct{} + +var ( + goClassificationAllColumns = []string{"go_id", "parent_go_id", "pref_name", "class_level", "aspect", "path"} + goClassificationColumnsWithoutDefault = []string{"go_id"} + goClassificationColumnsWithDefault = []string{"parent_go_id", "pref_name", "class_level", "aspect", "path"} + goClassificationPrimaryKeyColumns = []string{"go_id"} + goClassificationGeneratedColumns = []string{} +) + +type ( + // GoClassificationSlice is an alias for a slice of pointers to GoClassification. 
+ // This should almost always be used instead of []GoClassification. + GoClassificationSlice []*GoClassification + // GoClassificationHook is the signature for custom GoClassification hook methods + GoClassificationHook func(context.Context, boil.ContextExecutor, *GoClassification) error + + goClassificationQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + goClassificationType = reflect.TypeOf(&GoClassification{}) + goClassificationMapping = queries.MakeStructMapping(goClassificationType) + goClassificationPrimaryKeyMapping, _ = queries.BindMapping(goClassificationType, goClassificationMapping, goClassificationPrimaryKeyColumns) + goClassificationInsertCacheMut sync.RWMutex + goClassificationInsertCache = make(map[string]insertCache) + goClassificationUpdateCacheMut sync.RWMutex + goClassificationUpdateCache = make(map[string]updateCache) + goClassificationUpsertCacheMut sync.RWMutex + goClassificationUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var goClassificationAfterSelectHooks []GoClassificationHook + +var goClassificationBeforeInsertHooks []GoClassificationHook +var goClassificationAfterInsertHooks []GoClassificationHook + +var goClassificationBeforeUpdateHooks []GoClassificationHook +var goClassificationAfterUpdateHooks []GoClassificationHook + +var goClassificationBeforeDeleteHooks []GoClassificationHook +var goClassificationAfterDeleteHooks []GoClassificationHook + +var goClassificationBeforeUpsertHooks []GoClassificationHook +var goClassificationAfterUpsertHooks []GoClassificationHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *GoClassification) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range goClassificationAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *GoClassification) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range goClassificationBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *GoClassification) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range goClassificationAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *GoClassification) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range goClassificationBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *GoClassification) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range goClassificationAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *GoClassification) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range goClassificationBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *GoClassification) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range goClassificationAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *GoClassification) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range goClassificationBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *GoClassification) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range goClassificationAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddGoClassificationHook registers your hook function for all future operations. 
+func AddGoClassificationHook(hookPoint boil.HookPoint, goClassificationHook GoClassificationHook) { + switch hookPoint { + case boil.AfterSelectHook: + goClassificationAfterSelectHooks = append(goClassificationAfterSelectHooks, goClassificationHook) + case boil.BeforeInsertHook: + goClassificationBeforeInsertHooks = append(goClassificationBeforeInsertHooks, goClassificationHook) + case boil.AfterInsertHook: + goClassificationAfterInsertHooks = append(goClassificationAfterInsertHooks, goClassificationHook) + case boil.BeforeUpdateHook: + goClassificationBeforeUpdateHooks = append(goClassificationBeforeUpdateHooks, goClassificationHook) + case boil.AfterUpdateHook: + goClassificationAfterUpdateHooks = append(goClassificationAfterUpdateHooks, goClassificationHook) + case boil.BeforeDeleteHook: + goClassificationBeforeDeleteHooks = append(goClassificationBeforeDeleteHooks, goClassificationHook) + case boil.AfterDeleteHook: + goClassificationAfterDeleteHooks = append(goClassificationAfterDeleteHooks, goClassificationHook) + case boil.BeforeUpsertHook: + goClassificationBeforeUpsertHooks = append(goClassificationBeforeUpsertHooks, goClassificationHook) + case boil.AfterUpsertHook: + goClassificationAfterUpsertHooks = append(goClassificationAfterUpsertHooks, goClassificationHook) + } +} + +// One returns a single goClassification record from the query. +func (q goClassificationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*GoClassification, error) { + o := &GoClassification{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for go_classification") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all GoClassification records from the query. 
+func (q goClassificationQuery) All(ctx context.Context, exec boil.ContextExecutor) (GoClassificationSlice, error) { + var o []*GoClassification + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to GoClassification slice") + } + + if len(goClassificationAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all GoClassification records in the query. +func (q goClassificationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count go_classification rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q goClassificationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if go_classification exists") + } + + return count > 0, nil +} + +// GoComponentGos retrieves all the component_go's ComponentGos with an executor via go_id column. +func (o *GoClassification) GoComponentGos(mods ...qm.QueryMod) componentGoQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"component_go\".\"go_id\"=?", o.GoID), + ) + + return ComponentGos(queryMods...) +} + +// LoadGoComponentGos allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (goClassificationL) LoadGoComponentGos(ctx context.Context, e boil.ContextExecutor, singular bool, maybeGoClassification interface{}, mods queries.Applicator) error { + var slice []*GoClassification + var object *GoClassification + + if singular { + object = maybeGoClassification.(*GoClassification) + } else { + slice = *maybeGoClassification.(*[]*GoClassification) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &goClassificationR{} + } + args = append(args, object.GoID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &goClassificationR{} + } + + for _, a := range args { + if a == obj.GoID { + continue Outer + } + } + + args = append(args, obj.GoID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`component_go`), + qm.WhereIn(`component_go.go_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load component_go") + } + + var resultSlice []*ComponentGo + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice component_go") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on component_go") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for component_go") + } + + if len(componentGoAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.GoComponentGos = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &componentGoR{} + } + foreign.R.Go = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.GoID == foreign.GoID { + 
local.R.GoComponentGos = append(local.R.GoComponentGos, foreign) + if foreign.R == nil { + foreign.R = &componentGoR{} + } + foreign.R.Go = local + break + } + } + } + + return nil +} + +// AddGoComponentGos adds the given related objects to the existing relationships +// of the go_classification, optionally inserting them as new records. +// Appends related to o.R.GoComponentGos. +// Sets related.R.Go appropriately. +func (o *GoClassification) AddGoComponentGos(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ComponentGo) error { + var err error + for _, rel := range related { + if insert { + rel.GoID = o.GoID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"component_go\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"go_id"}), + strmangle.WhereClause("\"", "\"", 0, componentGoPrimaryKeyColumns), + ) + values := []interface{}{o.GoID, rel.CompGoID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.GoID = o.GoID + } + } + + if o.R == nil { + o.R = &goClassificationR{ + GoComponentGos: related, + } + } else { + o.R.GoComponentGos = append(o.R.GoComponentGos, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &componentGoR{ + Go: o, + } + } else { + rel.R.Go = o + } + } + return nil +} + +// GoClassifications retrieves all the records using an executor. +func GoClassifications(mods ...qm.QueryMod) goClassificationQuery { + mods = append(mods, qm.From("\"go_classification\"")) + q := NewQuery(mods...) 
+ if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"go_classification\".*"}) + } + + return goClassificationQuery{q} +} + +// FindGoClassification retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindGoClassification(ctx context.Context, exec boil.ContextExecutor, goID string, selectCols ...string) (*GoClassification, error) { + goClassificationObj := &GoClassification{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"go_classification\" where \"go_id\"=?", sel, + ) + + q := queries.Raw(query, goID) + + err := q.Bind(ctx, exec, goClassificationObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from go_classification") + } + + if err = goClassificationObj.doAfterSelectHooks(ctx, exec); err != nil { + return goClassificationObj, err + } + + return goClassificationObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *GoClassification) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no go_classification provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(goClassificationColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + goClassificationInsertCacheMut.RLock() + cache, cached := goClassificationInsertCache[key] + goClassificationInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + goClassificationAllColumns, + goClassificationColumnsWithDefault, + goClassificationColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(goClassificationType, goClassificationMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(goClassificationType, goClassificationMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"go_classification\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"go_classification\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) 
+ } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into go_classification") + } + + if !cached { + goClassificationInsertCacheMut.Lock() + goClassificationInsertCache[key] = cache + goClassificationInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the GoClassification. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *GoClassification) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + goClassificationUpdateCacheMut.RLock() + cache, cached := goClassificationUpdateCache[key] + goClassificationUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + goClassificationAllColumns, + goClassificationPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update go_classification, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"go_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, goClassificationPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(goClassificationType, goClassificationMapping, append(wl, goClassificationPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var 
result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update go_classification row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for go_classification") + } + + if !cached { + goClassificationUpdateCacheMut.Lock() + goClassificationUpdateCache[key] = cache + goClassificationUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q goClassificationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for go_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for go_classification") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o GoClassificationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), goClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"go_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, goClassificationPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in goClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all goClassification") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *GoClassification) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no go_classification provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(goClassificationColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + 
strmangle.PutBuffer(buf) + + goClassificationUpsertCacheMut.RLock() + cache, cached := goClassificationUpsertCache[key] + goClassificationUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + goClassificationAllColumns, + goClassificationColumnsWithDefault, + goClassificationColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + goClassificationAllColumns, + goClassificationPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert go_classification, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(goClassificationPrimaryKeyColumns)) + copy(conflict, goClassificationPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"go_classification\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(goClassificationType, goClassificationMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(goClassificationType, goClassificationMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert go_classification") + } + + if !cached { + goClassificationUpsertCacheMut.Lock() + goClassificationUpsertCache[key] = cache + goClassificationUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single GoClassification record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *GoClassification) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no GoClassification provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), goClassificationPrimaryKeyMapping) + sql := "DELETE FROM \"go_classification\" WHERE \"go_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from go_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for go_classification") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q goClassificationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no goClassificationQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from go_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for go_classification") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o GoClassificationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(goClassificationBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), goClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"go_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, goClassificationPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from goClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for go_classification") + } + + if len(goClassificationAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *GoClassification) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindGoClassification(ctx, exec, o.GoID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *GoClassificationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := GoClassificationSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), goClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"go_classification\".* FROM \"go_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, goClassificationPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in GoClassificationSlice") + } + + *o = slice + + return nil +} + +// GoClassificationExists checks if the GoClassification row exists. 
+func GoClassificationExists(ctx context.Context, exec boil.ContextExecutor, goID string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"go_classification\" where \"go_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, goID) + } + row := exec.QueryRowContext(ctx, sql, goID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if go_classification exists") + } + + return exists, nil +} diff --git a/models/hrac_classification.go b/models/hrac_classification.go new file mode 100644 index 0000000..3e5dd83 --- /dev/null +++ b/models/hrac_classification.go @@ -0,0 +1,1108 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// HracClassification is an object representing the database table. 
+type HracClassification struct { + HracClassID int64 `boil:"hrac_class_id" json:"hrac_class_id" toml:"hrac_class_id" yaml:"hrac_class_id"` + ActiveIngredient string `boil:"active_ingredient" json:"active_ingredient" toml:"active_ingredient" yaml:"active_ingredient"` + Level1 string `boil:"level1" json:"level1" toml:"level1" yaml:"level1"` + Level1Description string `boil:"level1_description" json:"level1_description" toml:"level1_description" yaml:"level1_description"` + Level2 string `boil:"level2" json:"level2" toml:"level2" yaml:"level2"` + Level2Description null.String `boil:"level2_description" json:"level2_description,omitempty" toml:"level2_description" yaml:"level2_description,omitempty"` + Level3 string `boil:"level3" json:"level3" toml:"level3" yaml:"level3"` + HracCode string `boil:"hrac_code" json:"hrac_code" toml:"hrac_code" yaml:"hrac_code"` + + R *hracClassificationR `boil:"-" json:"-" toml:"-" yaml:"-"` + L hracClassificationL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var HracClassificationColumns = struct { + HracClassID string + ActiveIngredient string + Level1 string + Level1Description string + Level2 string + Level2Description string + Level3 string + HracCode string +}{ + HracClassID: "hrac_class_id", + ActiveIngredient: "active_ingredient", + Level1: "level1", + Level1Description: "level1_description", + Level2: "level2", + Level2Description: "level2_description", + Level3: "level3", + HracCode: "hrac_code", +} + +var HracClassificationTableColumns = struct { + HracClassID string + ActiveIngredient string + Level1 string + Level1Description string + Level2 string + Level2Description string + Level3 string + HracCode string +}{ + HracClassID: "hrac_classification.hrac_class_id", + ActiveIngredient: "hrac_classification.active_ingredient", + Level1: "hrac_classification.level1", + Level1Description: "hrac_classification.level1_description", + Level2: "hrac_classification.level2", + Level2Description: "hrac_classification.level2_description", 
+ Level3: "hrac_classification.level3", + HracCode: "hrac_classification.hrac_code", +} + +// Generated where + +var HracClassificationWhere = struct { + HracClassID whereHelperint64 + ActiveIngredient whereHelperstring + Level1 whereHelperstring + Level1Description whereHelperstring + Level2 whereHelperstring + Level2Description whereHelpernull_String + Level3 whereHelperstring + HracCode whereHelperstring +}{ + HracClassID: whereHelperint64{field: "\"hrac_classification\".\"hrac_class_id\""}, + ActiveIngredient: whereHelperstring{field: "\"hrac_classification\".\"active_ingredient\""}, + Level1: whereHelperstring{field: "\"hrac_classification\".\"level1\""}, + Level1Description: whereHelperstring{field: "\"hrac_classification\".\"level1_description\""}, + Level2: whereHelperstring{field: "\"hrac_classification\".\"level2\""}, + Level2Description: whereHelpernull_String{field: "\"hrac_classification\".\"level2_description\""}, + Level3: whereHelperstring{field: "\"hrac_classification\".\"level3\""}, + HracCode: whereHelperstring{field: "\"hrac_classification\".\"hrac_code\""}, +} + +// HracClassificationRels is where relationship names are stored. +var HracClassificationRels = struct { + HracClassMoleculeHracClassifications string +}{ + HracClassMoleculeHracClassifications: "HracClassMoleculeHracClassifications", +} + +// hracClassificationR is where relationships are stored. 
+type hracClassificationR struct { + HracClassMoleculeHracClassifications MoleculeHracClassificationSlice `boil:"HracClassMoleculeHracClassifications" json:"HracClassMoleculeHracClassifications" toml:"HracClassMoleculeHracClassifications" yaml:"HracClassMoleculeHracClassifications"` +} + +// NewStruct creates a new relationship struct +func (*hracClassificationR) NewStruct() *hracClassificationR { + return &hracClassificationR{} +} + +func (r *hracClassificationR) GetHracClassMoleculeHracClassifications() MoleculeHracClassificationSlice { + if r == nil { + return nil + } + return r.HracClassMoleculeHracClassifications +} + +// hracClassificationL is where Load methods for each relationship are stored. +type hracClassificationL struct{} + +var ( + hracClassificationAllColumns = []string{"hrac_class_id", "active_ingredient", "level1", "level1_description", "level2", "level2_description", "level3", "hrac_code"} + hracClassificationColumnsWithoutDefault = []string{"hrac_class_id", "active_ingredient", "level1", "level1_description", "level2", "level3", "hrac_code"} + hracClassificationColumnsWithDefault = []string{"level2_description"} + hracClassificationPrimaryKeyColumns = []string{"hrac_class_id"} + hracClassificationGeneratedColumns = []string{} +) + +type ( + // HracClassificationSlice is an alias for a slice of pointers to HracClassification. + // This should almost always be used instead of []HracClassification. 
+ HracClassificationSlice []*HracClassification + // HracClassificationHook is the signature for custom HracClassification hook methods + HracClassificationHook func(context.Context, boil.ContextExecutor, *HracClassification) error + + hracClassificationQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + hracClassificationType = reflect.TypeOf(&HracClassification{}) + hracClassificationMapping = queries.MakeStructMapping(hracClassificationType) + hracClassificationPrimaryKeyMapping, _ = queries.BindMapping(hracClassificationType, hracClassificationMapping, hracClassificationPrimaryKeyColumns) + hracClassificationInsertCacheMut sync.RWMutex + hracClassificationInsertCache = make(map[string]insertCache) + hracClassificationUpdateCacheMut sync.RWMutex + hracClassificationUpdateCache = make(map[string]updateCache) + hracClassificationUpsertCacheMut sync.RWMutex + hracClassificationUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var hracClassificationAfterSelectHooks []HracClassificationHook + +var hracClassificationBeforeInsertHooks []HracClassificationHook +var hracClassificationAfterInsertHooks []HracClassificationHook + +var hracClassificationBeforeUpdateHooks []HracClassificationHook +var hracClassificationAfterUpdateHooks []HracClassificationHook + +var hracClassificationBeforeDeleteHooks []HracClassificationHook +var hracClassificationAfterDeleteHooks []HracClassificationHook + +var hracClassificationBeforeUpsertHooks []HracClassificationHook +var hracClassificationAfterUpsertHooks []HracClassificationHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *HracClassification) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range hracClassificationAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *HracClassification) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range hracClassificationBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *HracClassification) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range hracClassificationAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *HracClassification) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range hracClassificationBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *HracClassification) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range hracClassificationAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *HracClassification) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range hracClassificationBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *HracClassification) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range hracClassificationAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *HracClassification) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range hracClassificationBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *HracClassification) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range hracClassificationAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddHracClassificationHook registers your hook function for all future operations. 
+func AddHracClassificationHook(hookPoint boil.HookPoint, hracClassificationHook HracClassificationHook) { + switch hookPoint { + case boil.AfterSelectHook: + hracClassificationAfterSelectHooks = append(hracClassificationAfterSelectHooks, hracClassificationHook) + case boil.BeforeInsertHook: + hracClassificationBeforeInsertHooks = append(hracClassificationBeforeInsertHooks, hracClassificationHook) + case boil.AfterInsertHook: + hracClassificationAfterInsertHooks = append(hracClassificationAfterInsertHooks, hracClassificationHook) + case boil.BeforeUpdateHook: + hracClassificationBeforeUpdateHooks = append(hracClassificationBeforeUpdateHooks, hracClassificationHook) + case boil.AfterUpdateHook: + hracClassificationAfterUpdateHooks = append(hracClassificationAfterUpdateHooks, hracClassificationHook) + case boil.BeforeDeleteHook: + hracClassificationBeforeDeleteHooks = append(hracClassificationBeforeDeleteHooks, hracClassificationHook) + case boil.AfterDeleteHook: + hracClassificationAfterDeleteHooks = append(hracClassificationAfterDeleteHooks, hracClassificationHook) + case boil.BeforeUpsertHook: + hracClassificationBeforeUpsertHooks = append(hracClassificationBeforeUpsertHooks, hracClassificationHook) + case boil.AfterUpsertHook: + hracClassificationAfterUpsertHooks = append(hracClassificationAfterUpsertHooks, hracClassificationHook) + } +} + +// One returns a single hracClassification record from the query. +func (q hracClassificationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*HracClassification, error) { + o := &HracClassification{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for hrac_classification") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all HracClassification records from the query. 
+func (q hracClassificationQuery) All(ctx context.Context, exec boil.ContextExecutor) (HracClassificationSlice, error) { + var o []*HracClassification + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to HracClassification slice") + } + + if len(hracClassificationAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all HracClassification records in the query. +func (q hracClassificationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count hrac_classification rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q hracClassificationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if hrac_classification exists") + } + + return count > 0, nil +} + +// HracClassMoleculeHracClassifications retrieves all the molecule_hrac_classification's MoleculeHracClassifications with an executor via hrac_class_id column. +func (o *HracClassification) HracClassMoleculeHracClassifications(mods ...qm.QueryMod) moleculeHracClassificationQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"molecule_hrac_classification\".\"hrac_class_id\"=?", o.HracClassID), + ) + + return MoleculeHracClassifications(queryMods...) 
+} + +// LoadHracClassMoleculeHracClassifications allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (hracClassificationL) LoadHracClassMoleculeHracClassifications(ctx context.Context, e boil.ContextExecutor, singular bool, maybeHracClassification interface{}, mods queries.Applicator) error { + var slice []*HracClassification + var object *HracClassification + + if singular { + object = maybeHracClassification.(*HracClassification) + } else { + slice = *maybeHracClassification.(*[]*HracClassification) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &hracClassificationR{} + } + args = append(args, object.HracClassID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &hracClassificationR{} + } + + for _, a := range args { + if a == obj.HracClassID { + continue Outer + } + } + + args = append(args, obj.HracClassID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`molecule_hrac_classification`), + qm.WhereIn(`molecule_hrac_classification.hrac_class_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load molecule_hrac_classification") + } + + var resultSlice []*MoleculeHracClassification + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice molecule_hrac_classification") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on molecule_hrac_classification") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_hrac_classification") + } + + if len(moleculeHracClassificationAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := 
obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.HracClassMoleculeHracClassifications = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &moleculeHracClassificationR{} + } + foreign.R.HracClass = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.HracClassID == foreign.HracClassID { + local.R.HracClassMoleculeHracClassifications = append(local.R.HracClassMoleculeHracClassifications, foreign) + if foreign.R == nil { + foreign.R = &moleculeHracClassificationR{} + } + foreign.R.HracClass = local + break + } + } + } + + return nil +} + +// AddHracClassMoleculeHracClassifications adds the given related objects to the existing relationships +// of the hrac_classification, optionally inserting them as new records. +// Appends related to o.R.HracClassMoleculeHracClassifications. +// Sets related.R.HracClass appropriately. +func (o *HracClassification) AddHracClassMoleculeHracClassifications(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeHracClassification) error { + var err error + for _, rel := range related { + if insert { + rel.HracClassID = o.HracClassID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_hrac_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"hrac_class_id"}), + strmangle.WhereClause("\"", "\"", 0, moleculeHracClassificationPrimaryKeyColumns), + ) + values := []interface{}{o.HracClassID, rel.MolHracID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.HracClassID = 
o.HracClassID + } + } + + if o.R == nil { + o.R = &hracClassificationR{ + HracClassMoleculeHracClassifications: related, + } + } else { + o.R.HracClassMoleculeHracClassifications = append(o.R.HracClassMoleculeHracClassifications, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &moleculeHracClassificationR{ + HracClass: o, + } + } else { + rel.R.HracClass = o + } + } + return nil +} + +// HracClassifications retrieves all the records using an executor. +func HracClassifications(mods ...qm.QueryMod) hracClassificationQuery { + mods = append(mods, qm.From("\"hrac_classification\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"hrac_classification\".*"}) + } + + return hracClassificationQuery{q} +} + +// FindHracClassification retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindHracClassification(ctx context.Context, exec boil.ContextExecutor, hracClassID int64, selectCols ...string) (*HracClassification, error) { + hracClassificationObj := &HracClassification{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"hrac_classification\" where \"hrac_class_id\"=?", sel, + ) + + q := queries.Raw(query, hracClassID) + + err := q.Bind(ctx, exec, hracClassificationObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from hrac_classification") + } + + if err = hracClassificationObj.doAfterSelectHooks(ctx, exec); err != nil { + return hracClassificationObj, err + } + + return hracClassificationObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *HracClassification) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no hrac_classification provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(hracClassificationColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + hracClassificationInsertCacheMut.RLock() + cache, cached := hracClassificationInsertCache[key] + hracClassificationInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + hracClassificationAllColumns, + hracClassificationColumnsWithDefault, + hracClassificationColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(hracClassificationType, hracClassificationMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(hracClassificationType, hracClassificationMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"hrac_classification\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"hrac_classification\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, 
cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into hrac_classification") + } + + if !cached { + hracClassificationInsertCacheMut.Lock() + hracClassificationInsertCache[key] = cache + hracClassificationInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the HracClassification. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *HracClassification) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + hracClassificationUpdateCacheMut.RLock() + cache, cached := hracClassificationUpdateCache[key] + hracClassificationUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + hracClassificationAllColumns, + hracClassificationPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update hrac_classification, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"hrac_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, hracClassificationPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(hracClassificationType, hracClassificationMapping, append(wl, hracClassificationPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, 
cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update hrac_classification row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for hrac_classification") + } + + if !cached { + hracClassificationUpdateCacheMut.Lock() + hracClassificationUpdateCache[key] = cache + hracClassificationUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q hracClassificationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for hrac_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for hrac_classification") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o HracClassificationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), hracClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"hrac_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, hracClassificationPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in hracClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all hracClassification") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *HracClassification) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no hrac_classification provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(hracClassificationColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := 
buf.String() + strmangle.PutBuffer(buf) + + hracClassificationUpsertCacheMut.RLock() + cache, cached := hracClassificationUpsertCache[key] + hracClassificationUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + hracClassificationAllColumns, + hracClassificationColumnsWithDefault, + hracClassificationColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + hracClassificationAllColumns, + hracClassificationPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert hrac_classification, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(hracClassificationPrimaryKeyColumns)) + copy(conflict, hracClassificationPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"hrac_classification\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(hracClassificationType, hracClassificationMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(hracClassificationType, hracClassificationMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert hrac_classification") + } + + if !cached { + hracClassificationUpsertCacheMut.Lock() + hracClassificationUpsertCache[key] = cache + hracClassificationUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single HracClassification record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *HracClassification) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no HracClassification provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), hracClassificationPrimaryKeyMapping) + sql := "DELETE FROM \"hrac_classification\" WHERE \"hrac_class_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from hrac_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for hrac_classification") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q hracClassificationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no hracClassificationQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from hrac_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for hrac_classification") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o HracClassificationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(hracClassificationBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), hracClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"hrac_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, hracClassificationPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from hracClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for hrac_classification") + } + + if len(hracClassificationAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *HracClassification) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindHracClassification(ctx, exec, o.HracClassID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *HracClassificationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := HracClassificationSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), hracClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"hrac_classification\".* FROM \"hrac_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, hracClassificationPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in HracClassificationSlice") + } + + *o = slice + + return nil +} + +// HracClassificationExists checks if the HracClassification row exists. 
+func HracClassificationExists(ctx context.Context, exec boil.ContextExecutor, hracClassID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"hrac_classification\" where \"hrac_class_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, hracClassID) + } + row := exec.QueryRowContext(ctx, sql, hracClassID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if hrac_classification exists") + } + + return exists, nil +} diff --git a/models/indication_refs.go b/models/indication_refs.go new file mode 100644 index 0000000..b350a64 --- /dev/null +++ b/models/indication_refs.go @@ -0,0 +1,1083 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// IndicationRef is an object representing the database table. 
+type IndicationRef struct { + IndrefID int64 `boil:"indref_id" json:"indref_id" toml:"indref_id" yaml:"indref_id"` + DrugindID int64 `boil:"drugind_id" json:"drugind_id" toml:"drugind_id" yaml:"drugind_id"` + RefType string `boil:"ref_type" json:"ref_type" toml:"ref_type" yaml:"ref_type"` + RefID string `boil:"ref_id" json:"ref_id" toml:"ref_id" yaml:"ref_id"` + RefURL string `boil:"ref_url" json:"ref_url" toml:"ref_url" yaml:"ref_url"` + + R *indicationRefR `boil:"-" json:"-" toml:"-" yaml:"-"` + L indicationRefL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var IndicationRefColumns = struct { + IndrefID string + DrugindID string + RefType string + RefID string + RefURL string +}{ + IndrefID: "indref_id", + DrugindID: "drugind_id", + RefType: "ref_type", + RefID: "ref_id", + RefURL: "ref_url", +} + +var IndicationRefTableColumns = struct { + IndrefID string + DrugindID string + RefType string + RefID string + RefURL string +}{ + IndrefID: "indication_refs.indref_id", + DrugindID: "indication_refs.drugind_id", + RefType: "indication_refs.ref_type", + RefID: "indication_refs.ref_id", + RefURL: "indication_refs.ref_url", +} + +// Generated where + +var IndicationRefWhere = struct { + IndrefID whereHelperint64 + DrugindID whereHelperint64 + RefType whereHelperstring + RefID whereHelperstring + RefURL whereHelperstring +}{ + IndrefID: whereHelperint64{field: "\"indication_refs\".\"indref_id\""}, + DrugindID: whereHelperint64{field: "\"indication_refs\".\"drugind_id\""}, + RefType: whereHelperstring{field: "\"indication_refs\".\"ref_type\""}, + RefID: whereHelperstring{field: "\"indication_refs\".\"ref_id\""}, + RefURL: whereHelperstring{field: "\"indication_refs\".\"ref_url\""}, +} + +// IndicationRefRels is where relationship names are stored. +var IndicationRefRels = struct { + Drugind string +}{ + Drugind: "Drugind", +} + +// indicationRefR is where relationships are stored. 
+type indicationRefR struct { + Drugind *DrugIndication `boil:"Drugind" json:"Drugind" toml:"Drugind" yaml:"Drugind"` +} + +// NewStruct creates a new relationship struct +func (*indicationRefR) NewStruct() *indicationRefR { + return &indicationRefR{} +} + +func (r *indicationRefR) GetDrugind() *DrugIndication { + if r == nil { + return nil + } + return r.Drugind +} + +// indicationRefL is where Load methods for each relationship are stored. +type indicationRefL struct{} + +var ( + indicationRefAllColumns = []string{"indref_id", "drugind_id", "ref_type", "ref_id", "ref_url"} + indicationRefColumnsWithoutDefault = []string{"indref_id", "drugind_id", "ref_type", "ref_id", "ref_url"} + indicationRefColumnsWithDefault = []string{} + indicationRefPrimaryKeyColumns = []string{"indref_id"} + indicationRefGeneratedColumns = []string{} +) + +type ( + // IndicationRefSlice is an alias for a slice of pointers to IndicationRef. + // This should almost always be used instead of []IndicationRef. + IndicationRefSlice []*IndicationRef + // IndicationRefHook is the signature for custom IndicationRef hook methods + IndicationRefHook func(context.Context, boil.ContextExecutor, *IndicationRef) error + + indicationRefQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + indicationRefType = reflect.TypeOf(&IndicationRef{}) + indicationRefMapping = queries.MakeStructMapping(indicationRefType) + indicationRefPrimaryKeyMapping, _ = queries.BindMapping(indicationRefType, indicationRefMapping, indicationRefPrimaryKeyColumns) + indicationRefInsertCacheMut sync.RWMutex + indicationRefInsertCache = make(map[string]insertCache) + indicationRefUpdateCacheMut sync.RWMutex + indicationRefUpdateCache = make(map[string]updateCache) + indicationRefUpsertCacheMut sync.RWMutex + indicationRefUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var indicationRefAfterSelectHooks []IndicationRefHook + +var indicationRefBeforeInsertHooks []IndicationRefHook +var indicationRefAfterInsertHooks []IndicationRefHook + +var indicationRefBeforeUpdateHooks []IndicationRefHook +var indicationRefAfterUpdateHooks []IndicationRefHook + +var indicationRefBeforeDeleteHooks []IndicationRefHook +var indicationRefAfterDeleteHooks []IndicationRefHook + +var indicationRefBeforeUpsertHooks []IndicationRefHook +var indicationRefAfterUpsertHooks []IndicationRefHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *IndicationRef) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range indicationRefAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *IndicationRef) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range indicationRefBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *IndicationRef) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range indicationRefAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *IndicationRef) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range indicationRefBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *IndicationRef) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range indicationRefAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *IndicationRef) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range indicationRefBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *IndicationRef) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range indicationRefAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *IndicationRef) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range indicationRefBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *IndicationRef) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range indicationRefAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddIndicationRefHook registers your hook function for all future operations. +func AddIndicationRefHook(hookPoint boil.HookPoint, indicationRefHook IndicationRefHook) { + switch hookPoint { + case boil.AfterSelectHook: + indicationRefAfterSelectHooks = append(indicationRefAfterSelectHooks, indicationRefHook) + case boil.BeforeInsertHook: + indicationRefBeforeInsertHooks = append(indicationRefBeforeInsertHooks, indicationRefHook) + case boil.AfterInsertHook: + indicationRefAfterInsertHooks = append(indicationRefAfterInsertHooks, indicationRefHook) + case boil.BeforeUpdateHook: + indicationRefBeforeUpdateHooks = append(indicationRefBeforeUpdateHooks, indicationRefHook) + case boil.AfterUpdateHook: + indicationRefAfterUpdateHooks = append(indicationRefAfterUpdateHooks, indicationRefHook) + case boil.BeforeDeleteHook: + indicationRefBeforeDeleteHooks = append(indicationRefBeforeDeleteHooks, indicationRefHook) + case boil.AfterDeleteHook: + indicationRefAfterDeleteHooks = append(indicationRefAfterDeleteHooks, indicationRefHook) + case boil.BeforeUpsertHook: + indicationRefBeforeUpsertHooks = append(indicationRefBeforeUpsertHooks, indicationRefHook) + case boil.AfterUpsertHook: + indicationRefAfterUpsertHooks = append(indicationRefAfterUpsertHooks, indicationRefHook) + } +} + +// One returns a single indicationRef record from the query. 
+func (q indicationRefQuery) One(ctx context.Context, exec boil.ContextExecutor) (*IndicationRef, error) { + o := &IndicationRef{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for indication_refs") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all IndicationRef records from the query. +func (q indicationRefQuery) All(ctx context.Context, exec boil.ContextExecutor) (IndicationRefSlice, error) { + var o []*IndicationRef + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to IndicationRef slice") + } + + if len(indicationRefAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all IndicationRef records in the query. +func (q indicationRefQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count indication_refs rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q indicationRefQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if indication_refs exists") + } + + return count > 0, nil +} + +// Drugind pointed to by the foreign key. 
+func (o *IndicationRef) Drugind(mods ...qm.QueryMod) drugIndicationQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"drugind_id\" = ?", o.DrugindID), + } + + queryMods = append(queryMods, mods...) + + return DrugIndications(queryMods...) +} + +// LoadDrugind allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (indicationRefL) LoadDrugind(ctx context.Context, e boil.ContextExecutor, singular bool, maybeIndicationRef interface{}, mods queries.Applicator) error { + var slice []*IndicationRef + var object *IndicationRef + + if singular { + object = maybeIndicationRef.(*IndicationRef) + } else { + slice = *maybeIndicationRef.(*[]*IndicationRef) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &indicationRefR{} + } + args = append(args, object.DrugindID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &indicationRefR{} + } + + for _, a := range args { + if a == obj.DrugindID { + continue Outer + } + } + + args = append(args, obj.DrugindID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`drug_indication`), + qm.WhereIn(`drug_indication.drugind_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load DrugIndication") + } + + var resultSlice []*DrugIndication + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice DrugIndication") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for drug_indication") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for drug_indication") + } + + if len(indicationRefAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if 
err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Drugind = foreign + if foreign.R == nil { + foreign.R = &drugIndicationR{} + } + foreign.R.DrugindIndicationRefs = append(foreign.R.DrugindIndicationRefs, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.DrugindID == foreign.DrugindID { + local.R.Drugind = foreign + if foreign.R == nil { + foreign.R = &drugIndicationR{} + } + foreign.R.DrugindIndicationRefs = append(foreign.R.DrugindIndicationRefs, local) + break + } + } + } + + return nil +} + +// SetDrugind of the indicationRef to the related item. +// Sets o.R.Drugind to related. +// Adds o to related.R.DrugindIndicationRefs. +func (o *IndicationRef) SetDrugind(ctx context.Context, exec boil.ContextExecutor, insert bool, related *DrugIndication) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"indication_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"drugind_id"}), + strmangle.WhereClause("\"", "\"", 0, indicationRefPrimaryKeyColumns), + ) + values := []interface{}{related.DrugindID, o.IndrefID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.DrugindID = related.DrugindID + if o.R == nil { + o.R = &indicationRefR{ + Drugind: related, + } + } else { + o.R.Drugind = related + } + + if related.R == nil { + related.R = &drugIndicationR{ + DrugindIndicationRefs: IndicationRefSlice{o}, + } + } else { + related.R.DrugindIndicationRefs = 
append(related.R.DrugindIndicationRefs, o) + } + + return nil +} + +// IndicationRefs retrieves all the records using an executor. +func IndicationRefs(mods ...qm.QueryMod) indicationRefQuery { + mods = append(mods, qm.From("\"indication_refs\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"indication_refs\".*"}) + } + + return indicationRefQuery{q} +} + +// FindIndicationRef retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindIndicationRef(ctx context.Context, exec boil.ContextExecutor, indrefID int64, selectCols ...string) (*IndicationRef, error) { + indicationRefObj := &IndicationRef{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"indication_refs\" where \"indref_id\"=?", sel, + ) + + q := queries.Raw(query, indrefID) + + err := q.Bind(ctx, exec, indicationRefObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from indication_refs") + } + + if err = indicationRefObj.doAfterSelectHooks(ctx, exec); err != nil { + return indicationRefObj, err + } + + return indicationRefObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *IndicationRef) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no indication_refs provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(indicationRefColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + indicationRefInsertCacheMut.RLock() + cache, cached := indicationRefInsertCache[key] + indicationRefInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + indicationRefAllColumns, + indicationRefColumnsWithDefault, + indicationRefColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(indicationRefType, indicationRefMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(indicationRefType, indicationRefMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"indication_refs\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"indication_refs\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) 
+ } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into indication_refs") + } + + if !cached { + indicationRefInsertCacheMut.Lock() + indicationRefInsertCache[key] = cache + indicationRefInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the IndicationRef. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *IndicationRef) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + indicationRefUpdateCacheMut.RLock() + cache, cached := indicationRefUpdateCache[key] + indicationRefUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + indicationRefAllColumns, + indicationRefPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update indication_refs, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"indication_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, indicationRefPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(indicationRefType, indicationRefMapping, append(wl, indicationRefPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = 
exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update indication_refs row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for indication_refs") + } + + if !cached { + indicationRefUpdateCacheMut.Lock() + indicationRefUpdateCache[key] = cache + indicationRefUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q indicationRefQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for indication_refs") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for indication_refs") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o IndicationRefSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), indicationRefPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"indication_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, indicationRefPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in indicationRef slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all indicationRef") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *IndicationRef) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no indication_refs provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(indicationRefColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + 
strmangle.PutBuffer(buf) + + indicationRefUpsertCacheMut.RLock() + cache, cached := indicationRefUpsertCache[key] + indicationRefUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + indicationRefAllColumns, + indicationRefColumnsWithDefault, + indicationRefColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + indicationRefAllColumns, + indicationRefPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert indication_refs, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(indicationRefPrimaryKeyColumns)) + copy(conflict, indicationRefPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"indication_refs\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(indicationRefType, indicationRefMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(indicationRefType, indicationRefMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert indication_refs") + } + + if !cached { + indicationRefUpsertCacheMut.Lock() + indicationRefUpsertCache[key] = cache + indicationRefUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single IndicationRef record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *IndicationRef) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no IndicationRef provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), indicationRefPrimaryKeyMapping) + sql := "DELETE FROM \"indication_refs\" WHERE \"indref_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from indication_refs") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for indication_refs") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q indicationRefQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no indicationRefQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from indication_refs") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for indication_refs") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o IndicationRefSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(indicationRefBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), indicationRefPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"indication_refs\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, indicationRefPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from indicationRef slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for indication_refs") + } + + if len(indicationRefAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *IndicationRef) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindIndicationRef(ctx, exec, o.IndrefID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *IndicationRefSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := IndicationRefSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), indicationRefPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"indication_refs\".* FROM \"indication_refs\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, indicationRefPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in IndicationRefSlice") + } + + *o = slice + + return nil +} + +// IndicationRefExists checks if the IndicationRef row exists. +func IndicationRefExists(ctx context.Context, exec boil.ContextExecutor, indrefID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"indication_refs\" where \"indref_id\"=? 
limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, indrefID) + } + row := exec.QueryRowContext(ctx, sql, indrefID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if indication_refs exists") + } + + return exists, nil +} diff --git a/models/irac_classification.go b/models/irac_classification.go new file mode 100644 index 0000000..8c5a472 --- /dev/null +++ b/models/irac_classification.go @@ -0,0 +1,1121 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// IracClassification is an object representing the database table. 
+type IracClassification struct { + IracClassID int64 `boil:"irac_class_id" json:"irac_class_id" toml:"irac_class_id" yaml:"irac_class_id"` + ActiveIngredient string `boil:"active_ingredient" json:"active_ingredient" toml:"active_ingredient" yaml:"active_ingredient"` + Level1 string `boil:"level1" json:"level1" toml:"level1" yaml:"level1"` + Level1Description string `boil:"level1_description" json:"level1_description" toml:"level1_description" yaml:"level1_description"` + Level2 string `boil:"level2" json:"level2" toml:"level2" yaml:"level2"` + Level2Description string `boil:"level2_description" json:"level2_description" toml:"level2_description" yaml:"level2_description"` + Level3 string `boil:"level3" json:"level3" toml:"level3" yaml:"level3"` + Level3Description string `boil:"level3_description" json:"level3_description" toml:"level3_description" yaml:"level3_description"` + Level4 string `boil:"level4" json:"level4" toml:"level4" yaml:"level4"` + IracCode string `boil:"irac_code" json:"irac_code" toml:"irac_code" yaml:"irac_code"` + + R *iracClassificationR `boil:"-" json:"-" toml:"-" yaml:"-"` + L iracClassificationL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var IracClassificationColumns = struct { + IracClassID string + ActiveIngredient string + Level1 string + Level1Description string + Level2 string + Level2Description string + Level3 string + Level3Description string + Level4 string + IracCode string +}{ + IracClassID: "irac_class_id", + ActiveIngredient: "active_ingredient", + Level1: "level1", + Level1Description: "level1_description", + Level2: "level2", + Level2Description: "level2_description", + Level3: "level3", + Level3Description: "level3_description", + Level4: "level4", + IracCode: "irac_code", +} + +var IracClassificationTableColumns = struct { + IracClassID string + ActiveIngredient string + Level1 string + Level1Description string + Level2 string + Level2Description string + Level3 string + Level3Description string + Level4 string + 
IracCode string +}{ + IracClassID: "irac_classification.irac_class_id", + ActiveIngredient: "irac_classification.active_ingredient", + Level1: "irac_classification.level1", + Level1Description: "irac_classification.level1_description", + Level2: "irac_classification.level2", + Level2Description: "irac_classification.level2_description", + Level3: "irac_classification.level3", + Level3Description: "irac_classification.level3_description", + Level4: "irac_classification.level4", + IracCode: "irac_classification.irac_code", +} + +// Generated where + +var IracClassificationWhere = struct { + IracClassID whereHelperint64 + ActiveIngredient whereHelperstring + Level1 whereHelperstring + Level1Description whereHelperstring + Level2 whereHelperstring + Level2Description whereHelperstring + Level3 whereHelperstring + Level3Description whereHelperstring + Level4 whereHelperstring + IracCode whereHelperstring +}{ + IracClassID: whereHelperint64{field: "\"irac_classification\".\"irac_class_id\""}, + ActiveIngredient: whereHelperstring{field: "\"irac_classification\".\"active_ingredient\""}, + Level1: whereHelperstring{field: "\"irac_classification\".\"level1\""}, + Level1Description: whereHelperstring{field: "\"irac_classification\".\"level1_description\""}, + Level2: whereHelperstring{field: "\"irac_classification\".\"level2\""}, + Level2Description: whereHelperstring{field: "\"irac_classification\".\"level2_description\""}, + Level3: whereHelperstring{field: "\"irac_classification\".\"level3\""}, + Level3Description: whereHelperstring{field: "\"irac_classification\".\"level3_description\""}, + Level4: whereHelperstring{field: "\"irac_classification\".\"level4\""}, + IracCode: whereHelperstring{field: "\"irac_classification\".\"irac_code\""}, +} + +// IracClassificationRels is where relationship names are stored. 
+var IracClassificationRels = struct { + IracClassMoleculeIracClassifications string +}{ + IracClassMoleculeIracClassifications: "IracClassMoleculeIracClassifications", +} + +// iracClassificationR is where relationships are stored. +type iracClassificationR struct { + IracClassMoleculeIracClassifications MoleculeIracClassificationSlice `boil:"IracClassMoleculeIracClassifications" json:"IracClassMoleculeIracClassifications" toml:"IracClassMoleculeIracClassifications" yaml:"IracClassMoleculeIracClassifications"` +} + +// NewStruct creates a new relationship struct +func (*iracClassificationR) NewStruct() *iracClassificationR { + return &iracClassificationR{} +} + +func (r *iracClassificationR) GetIracClassMoleculeIracClassifications() MoleculeIracClassificationSlice { + if r == nil { + return nil + } + return r.IracClassMoleculeIracClassifications +} + +// iracClassificationL is where Load methods for each relationship are stored. +type iracClassificationL struct{} + +var ( + iracClassificationAllColumns = []string{"irac_class_id", "active_ingredient", "level1", "level1_description", "level2", "level2_description", "level3", "level3_description", "level4", "irac_code"} + iracClassificationColumnsWithoutDefault = []string{"irac_class_id", "active_ingredient", "level1", "level1_description", "level2", "level2_description", "level3", "level3_description", "level4", "irac_code"} + iracClassificationColumnsWithDefault = []string{} + iracClassificationPrimaryKeyColumns = []string{"irac_class_id"} + iracClassificationGeneratedColumns = []string{} +) + +type ( + // IracClassificationSlice is an alias for a slice of pointers to IracClassification. + // This should almost always be used instead of []IracClassification. 
+ IracClassificationSlice []*IracClassification + // IracClassificationHook is the signature for custom IracClassification hook methods + IracClassificationHook func(context.Context, boil.ContextExecutor, *IracClassification) error + + iracClassificationQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + iracClassificationType = reflect.TypeOf(&IracClassification{}) + iracClassificationMapping = queries.MakeStructMapping(iracClassificationType) + iracClassificationPrimaryKeyMapping, _ = queries.BindMapping(iracClassificationType, iracClassificationMapping, iracClassificationPrimaryKeyColumns) + iracClassificationInsertCacheMut sync.RWMutex + iracClassificationInsertCache = make(map[string]insertCache) + iracClassificationUpdateCacheMut sync.RWMutex + iracClassificationUpdateCache = make(map[string]updateCache) + iracClassificationUpsertCacheMut sync.RWMutex + iracClassificationUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var iracClassificationAfterSelectHooks []IracClassificationHook + +var iracClassificationBeforeInsertHooks []IracClassificationHook +var iracClassificationAfterInsertHooks []IracClassificationHook + +var iracClassificationBeforeUpdateHooks []IracClassificationHook +var iracClassificationAfterUpdateHooks []IracClassificationHook + +var iracClassificationBeforeDeleteHooks []IracClassificationHook +var iracClassificationAfterDeleteHooks []IracClassificationHook + +var iracClassificationBeforeUpsertHooks []IracClassificationHook +var iracClassificationAfterUpsertHooks []IracClassificationHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *IracClassification) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range iracClassificationAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *IracClassification) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range iracClassificationBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *IracClassification) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range iracClassificationAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *IracClassification) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range iracClassificationBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *IracClassification) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range iracClassificationAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *IracClassification) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range iracClassificationBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *IracClassification) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range iracClassificationAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *IracClassification) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range iracClassificationBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *IracClassification) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range iracClassificationAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddIracClassificationHook registers your hook function for all future operations. 
+func AddIracClassificationHook(hookPoint boil.HookPoint, iracClassificationHook IracClassificationHook) { + switch hookPoint { + case boil.AfterSelectHook: + iracClassificationAfterSelectHooks = append(iracClassificationAfterSelectHooks, iracClassificationHook) + case boil.BeforeInsertHook: + iracClassificationBeforeInsertHooks = append(iracClassificationBeforeInsertHooks, iracClassificationHook) + case boil.AfterInsertHook: + iracClassificationAfterInsertHooks = append(iracClassificationAfterInsertHooks, iracClassificationHook) + case boil.BeforeUpdateHook: + iracClassificationBeforeUpdateHooks = append(iracClassificationBeforeUpdateHooks, iracClassificationHook) + case boil.AfterUpdateHook: + iracClassificationAfterUpdateHooks = append(iracClassificationAfterUpdateHooks, iracClassificationHook) + case boil.BeforeDeleteHook: + iracClassificationBeforeDeleteHooks = append(iracClassificationBeforeDeleteHooks, iracClassificationHook) + case boil.AfterDeleteHook: + iracClassificationAfterDeleteHooks = append(iracClassificationAfterDeleteHooks, iracClassificationHook) + case boil.BeforeUpsertHook: + iracClassificationBeforeUpsertHooks = append(iracClassificationBeforeUpsertHooks, iracClassificationHook) + case boil.AfterUpsertHook: + iracClassificationAfterUpsertHooks = append(iracClassificationAfterUpsertHooks, iracClassificationHook) + } +} + +// One returns a single iracClassification record from the query. +func (q iracClassificationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*IracClassification, error) { + o := &IracClassification{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for irac_classification") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all IracClassification records from the query. 
+func (q iracClassificationQuery) All(ctx context.Context, exec boil.ContextExecutor) (IracClassificationSlice, error) { + var o []*IracClassification + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to IracClassification slice") + } + + if len(iracClassificationAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all IracClassification records in the query. +func (q iracClassificationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count irac_classification rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q iracClassificationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if irac_classification exists") + } + + return count > 0, nil +} + +// IracClassMoleculeIracClassifications retrieves all the molecule_irac_classification's MoleculeIracClassifications with an executor via irac_class_id column. +func (o *IracClassification) IracClassMoleculeIracClassifications(mods ...qm.QueryMod) moleculeIracClassificationQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"molecule_irac_classification\".\"irac_class_id\"=?", o.IracClassID), + ) + + return MoleculeIracClassifications(queryMods...) 
+} + +// LoadIracClassMoleculeIracClassifications allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (iracClassificationL) LoadIracClassMoleculeIracClassifications(ctx context.Context, e boil.ContextExecutor, singular bool, maybeIracClassification interface{}, mods queries.Applicator) error { + var slice []*IracClassification + var object *IracClassification + + if singular { + object = maybeIracClassification.(*IracClassification) + } else { + slice = *maybeIracClassification.(*[]*IracClassification) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &iracClassificationR{} + } + args = append(args, object.IracClassID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &iracClassificationR{} + } + + for _, a := range args { + if a == obj.IracClassID { + continue Outer + } + } + + args = append(args, obj.IracClassID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`molecule_irac_classification`), + qm.WhereIn(`molecule_irac_classification.irac_class_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load molecule_irac_classification") + } + + var resultSlice []*MoleculeIracClassification + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice molecule_irac_classification") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on molecule_irac_classification") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_irac_classification") + } + + if len(moleculeIracClassificationAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := 
obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.IracClassMoleculeIracClassifications = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &moleculeIracClassificationR{} + } + foreign.R.IracClass = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.IracClassID == foreign.IracClassID { + local.R.IracClassMoleculeIracClassifications = append(local.R.IracClassMoleculeIracClassifications, foreign) + if foreign.R == nil { + foreign.R = &moleculeIracClassificationR{} + } + foreign.R.IracClass = local + break + } + } + } + + return nil +} + +// AddIracClassMoleculeIracClassifications adds the given related objects to the existing relationships +// of the irac_classification, optionally inserting them as new records. +// Appends related to o.R.IracClassMoleculeIracClassifications. +// Sets related.R.IracClass appropriately. +func (o *IracClassification) AddIracClassMoleculeIracClassifications(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeIracClassification) error { + var err error + for _, rel := range related { + if insert { + rel.IracClassID = o.IracClassID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_irac_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"irac_class_id"}), + strmangle.WhereClause("\"", "\"", 0, moleculeIracClassificationPrimaryKeyColumns), + ) + values := []interface{}{o.IracClassID, rel.MolIracID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.IracClassID = 
o.IracClassID + } + } + + if o.R == nil { + o.R = &iracClassificationR{ + IracClassMoleculeIracClassifications: related, + } + } else { + o.R.IracClassMoleculeIracClassifications = append(o.R.IracClassMoleculeIracClassifications, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &moleculeIracClassificationR{ + IracClass: o, + } + } else { + rel.R.IracClass = o + } + } + return nil +} + +// IracClassifications retrieves all the records using an executor. +func IracClassifications(mods ...qm.QueryMod) iracClassificationQuery { + mods = append(mods, qm.From("\"irac_classification\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"irac_classification\".*"}) + } + + return iracClassificationQuery{q} +} + +// FindIracClassification retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindIracClassification(ctx context.Context, exec boil.ContextExecutor, iracClassID int64, selectCols ...string) (*IracClassification, error) { + iracClassificationObj := &IracClassification{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"irac_classification\" where \"irac_class_id\"=?", sel, + ) + + q := queries.Raw(query, iracClassID) + + err := q.Bind(ctx, exec, iracClassificationObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from irac_classification") + } + + if err = iracClassificationObj.doAfterSelectHooks(ctx, exec); err != nil { + return iracClassificationObj, err + } + + return iracClassificationObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
func (o *IracClassification) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no irac_classification provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	// Columns that have a database default but carry a non-zero value in o
	// must be included in the insert column set instead of being defaulted.
	nzDefaults := queries.NonZeroDefaultSet(iracClassificationColumnsWithDefault, o)

	// Statement text and struct mappings are cached per (column set, non-zero
	// defaults) key so repeated inserts don't rebuild the query.
	key := makeCacheKey(columns, nzDefaults)
	iracClassificationInsertCacheMut.RLock()
	cache, cached := iracClassificationInsertCache[key]
	iracClassificationInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			iracClassificationAllColumns,
			iracClassificationColumnsWithDefault,
			iracClassificationColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(iracClassificationType, iracClassificationMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(iracClassificationType, iracClassificationMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"irac_classification\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"irac_classification\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			// RETURNING pulls database-generated values back into the struct.
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into irac_classification")
	}

	if !cached {
		iracClassificationInsertCacheMut.Lock()
		iracClassificationInsertCache[key] = cache
		iracClassificationInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the IracClassification.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *IracClassification) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	iracClassificationUpdateCacheMut.RLock()
	cache, cached := iracClassificationUpdateCache[key]
	iracClassificationUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			iracClassificationAllColumns,
			iracClassificationPrimaryKeyColumns,
		)

		if !columns.IsWhitelist() {
			// created_at is never touched by an update unless explicitly whitelisted.
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update irac_classification, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"irac_classification\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, iracClassificationPrimaryKeyColumns),
		)
		// Bind SET values first, then the primary key for the WHERE clause,
		// matching the parameter order of the query above.
		cache.valueMapping, err = queries.BindMapping(iracClassificationType, iracClassificationMapping, append(wl, iracClassificationPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update irac_classification row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for irac_classification")
	}

	if !cached {
		iracClassificationUpdateCacheMut.Lock()
		iracClassificationUpdateCache[key] = cache
		iracClassificationUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q iracClassificationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for irac_classification")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for irac_classification")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
// Note: hooks are not run for slice-based UpdateAll.
func (o IracClassificationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), iracClassificationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"irac_classification\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, iracClassificationPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in iracClassification slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all iracClassification")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *IracClassification) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no irac_classification provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(iracClassificationColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	iracClassificationUpsertCacheMut.RLock()
	cache, cached := iracClassificationUpsertCache[key]
	iracClassificationUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			iracClassificationAllColumns,
			iracClassificationColumnsWithDefault,
			iracClassificationColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			iracClassificationAllColumns,
			iracClassificationPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert irac_classification, could not build update column list")
		}

		// Default the conflict target to the primary key when none is given.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(iracClassificationPrimaryKeyColumns))
			copy(conflict, iracClassificationPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"irac_classification\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(iracClassificationType, iracClassificationMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(iracClassificationType, iracClassificationMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		// NOTE(review): the query is built for SQLite (buildUpsertQuerySQLite)
		// but the comment below mentions Postgres — generator template artifact.
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert irac_classification")
	}

	if !cached {
		iracClassificationUpsertCacheMut.Lock()
		iracClassificationUpsertCache[key] = cache
		iracClassificationUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single IracClassification record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *IracClassification) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no IracClassification provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), iracClassificationPrimaryKeyMapping)
	sql := "DELETE FROM \"irac_classification\" WHERE \"irac_class_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from irac_classification")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for irac_classification")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q iracClassificationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no iracClassificationQuery provided for delete all")
	}

	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from irac_classification")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for irac_classification")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
// Before/after delete hooks are run per object when any are registered.
func (o IracClassificationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	if len(iracClassificationBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// One DELETE with a repeated primary-key WHERE clause covers the whole slice.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), iracClassificationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"irac_classification\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, iracClassificationPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from iracClassification slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for irac_classification")
	}

	if len(iracClassificationAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *IracClassification) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindIracClassification(ctx, exec, o.IracClassID)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place with the freshly fetched row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *IracClassificationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := IracClassificationSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), iracClassificationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"irac_classification\".* FROM \"irac_classification\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, iracClassificationPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in IracClassificationSlice")
	}

	// Replace the caller's slice wholesale; row order follows the query result.
	*o = slice

	return nil
}

// IracClassificationExists checks if the IracClassification row exists.
+func IracClassificationExists(ctx context.Context, exec boil.ContextExecutor, iracClassID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"irac_classification\" where \"irac_class_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, iracClassID) + } + row := exec.QueryRowContext(ctx, sql, iracClassID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if irac_classification exists") + } + + return exists, nil +} diff --git a/models/ligand_eff.go b/models/ligand_eff.go new file mode 100644 index 0000000..831e2f0 --- /dev/null +++ b/models/ligand_eff.go @@ -0,0 +1,1084 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/sqlboiler/v4/types" + "github.com/volatiletech/strmangle" +) + +// LigandEff is an object representing the database table. 
// LigandEff is an object representing a row of the "ligand_eff" database
// table; all efficiency metrics are nullable decimals.
type LigandEff struct {
	ActivityID int64             `boil:"activity_id" json:"activity_id" toml:"activity_id" yaml:"activity_id"`
	Bei        types.NullDecimal `boil:"bei" json:"bei,omitempty" toml:"bei" yaml:"bei,omitempty"`
	Sei        types.NullDecimal `boil:"sei" json:"sei,omitempty" toml:"sei" yaml:"sei,omitempty"`
	Le         types.NullDecimal `boil:"le" json:"le,omitempty" toml:"le" yaml:"le,omitempty"`
	Lle        types.NullDecimal `boil:"lle" json:"lle,omitempty" toml:"lle" yaml:"lle,omitempty"`

	// R holds eagerly-loaded relationships; L holds the Load methods.
	R *ligandEffR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L ligandEffL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// LigandEffColumns maps struct fields to their bare column names.
var LigandEffColumns = struct {
	ActivityID string
	Bei        string
	Sei        string
	Le         string
	Lle        string
}{
	ActivityID: "activity_id",
	Bei:        "bei",
	Sei:        "sei",
	Le:         "le",
	Lle:        "lle",
}

// LigandEffTableColumns maps struct fields to their table-qualified column names.
var LigandEffTableColumns = struct {
	ActivityID string
	Bei        string
	Sei        string
	Le         string
	Lle        string
}{
	ActivityID: "ligand_eff.activity_id",
	Bei:        "ligand_eff.bei",
	Sei:        "ligand_eff.sei",
	Le:         "ligand_eff.le",
	Lle:        "ligand_eff.lle",
}

// Generated where

var LigandEffWhere = struct {
	ActivityID whereHelperint64
	Bei        whereHelpertypes_NullDecimal
	Sei        whereHelpertypes_NullDecimal
	Le         whereHelpertypes_NullDecimal
	Lle        whereHelpertypes_NullDecimal
}{
	ActivityID: whereHelperint64{field: "\"ligand_eff\".\"activity_id\""},
	Bei:        whereHelpertypes_NullDecimal{field: "\"ligand_eff\".\"bei\""},
	Sei:        whereHelpertypes_NullDecimal{field: "\"ligand_eff\".\"sei\""},
	Le:         whereHelpertypes_NullDecimal{field: "\"ligand_eff\".\"le\""},
	Lle:        whereHelpertypes_NullDecimal{field: "\"ligand_eff\".\"lle\""},
}

// LigandEffRels is where relationship names are stored.
var LigandEffRels = struct {
	Activity string
}{
	Activity: "Activity",
}

// ligandEffR is where relationships are stored.
type ligandEffR struct {
	Activity *Activity `boil:"Activity" json:"Activity" toml:"Activity" yaml:"Activity"`
}

// NewStruct creates a new relationship struct
func (*ligandEffR) NewStruct() *ligandEffR {
	return &ligandEffR{}
}

// GetActivity returns the loaded Activity relationship, or nil when the
// receiver itself is nil (safe on unloaded rows).
func (r *ligandEffR) GetActivity() *Activity {
	if r == nil {
		return nil
	}
	return r.Activity
}

// ligandEffL is where Load methods for each relationship are stored.
type ligandEffL struct{}

// Column-set metadata used by Insert/Update/Upsert column inference.
var (
	ligandEffAllColumns            = []string{"activity_id", "bei", "sei", "le", "lle"}
	ligandEffColumnsWithoutDefault = []string{"activity_id"}
	ligandEffColumnsWithDefault    = []string{"bei", "sei", "le", "lle"}
	ligandEffPrimaryKeyColumns     = []string{"activity_id"}
	ligandEffGeneratedColumns      = []string{}
)

type (
	// LigandEffSlice is an alias for a slice of pointers to LigandEff.
	// This should almost always be used instead of []LigandEff.
	LigandEffSlice []*LigandEff
	// LigandEffHook is the signature for custom LigandEff hook methods
	LigandEffHook func(context.Context, boil.ContextExecutor, *LigandEff) error

	ligandEffQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	ligandEffType                 = reflect.TypeOf(&LigandEff{})
	ligandEffMapping              = queries.MakeStructMapping(ligandEffType)
	ligandEffPrimaryKeyMapping, _ = queries.BindMapping(ligandEffType, ligandEffMapping, ligandEffPrimaryKeyColumns)
	ligandEffInsertCacheMut       sync.RWMutex
	ligandEffInsertCache          = make(map[string]insertCache)
	ligandEffUpdateCacheMut       sync.RWMutex
	ligandEffUpdateCache          = make(map[string]updateCache)
	ligandEffUpsertCacheMut       sync.RWMutex
	ligandEffUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Package-level hook registries, populated via AddLigandEffHook.
var ligandEffAfterSelectHooks []LigandEffHook

var ligandEffBeforeInsertHooks []LigandEffHook
var ligandEffAfterInsertHooks []LigandEffHook

var ligandEffBeforeUpdateHooks []LigandEffHook
var ligandEffAfterUpdateHooks []LigandEffHook

var ligandEffBeforeDeleteHooks []LigandEffHook
var ligandEffAfterDeleteHooks []LigandEffHook

var ligandEffBeforeUpsertHooks []LigandEffHook
var ligandEffAfterUpsertHooks []LigandEffHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *LigandEff) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range ligandEffAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *LigandEff) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range ligandEffBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *LigandEff) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range ligandEffAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
// Each hook runner short-circuits on the first hook error; all runners are
// no-ops when hooks are skipped via boil.SkipHooks on the context.
func (o *LigandEff) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range ligandEffBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *LigandEff) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range ligandEffAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *LigandEff) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range ligandEffBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *LigandEff) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range ligandEffAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *LigandEff) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range ligandEffBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *LigandEff) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range ligandEffAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddLigandEffHook registers your hook function for all future operations.
// Hooks are appended to package-level slices and run in registration order;
// registration is not synchronized, so register hooks during startup.
func AddLigandEffHook(hookPoint boil.HookPoint, ligandEffHook LigandEffHook) {
	switch hookPoint {
	case boil.AfterSelectHook:
		ligandEffAfterSelectHooks = append(ligandEffAfterSelectHooks, ligandEffHook)
	case boil.BeforeInsertHook:
		ligandEffBeforeInsertHooks = append(ligandEffBeforeInsertHooks, ligandEffHook)
	case boil.AfterInsertHook:
		ligandEffAfterInsertHooks = append(ligandEffAfterInsertHooks, ligandEffHook)
	case boil.BeforeUpdateHook:
		ligandEffBeforeUpdateHooks = append(ligandEffBeforeUpdateHooks, ligandEffHook)
	case boil.AfterUpdateHook:
		ligandEffAfterUpdateHooks = append(ligandEffAfterUpdateHooks, ligandEffHook)
	case boil.BeforeDeleteHook:
		ligandEffBeforeDeleteHooks = append(ligandEffBeforeDeleteHooks, ligandEffHook)
	case boil.AfterDeleteHook:
		ligandEffAfterDeleteHooks = append(ligandEffAfterDeleteHooks, ligandEffHook)
	case boil.BeforeUpsertHook:
		ligandEffBeforeUpsertHooks = append(ligandEffBeforeUpsertHooks, ligandEffHook)
	case boil.AfterUpsertHook:
		ligandEffAfterUpsertHooks = append(ligandEffAfterUpsertHooks, ligandEffHook)
	}
}

// One returns a single ligandEff record from the query.
func (q ligandEffQuery) One(ctx context.Context, exec boil.ContextExecutor) (*LigandEff, error) {
	o := &LigandEff{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		// sql.ErrNoRows is passed through unwrapped so callers can test for it.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for ligand_eff")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all LigandEff records from the query.
func (q ligandEffQuery) All(ctx context.Context, exec boil.ContextExecutor) (LigandEffSlice, error) {
	var o []*LigandEff

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to LigandEff slice")
	}

	if len(ligandEffAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all LigandEff records in the query.
func (q ligandEffQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Replace any SELECT list with COUNT(*).
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count ligand_eff rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q ligandEffQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	// LIMIT 1 keeps the existence check cheap regardless of match count.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if ligand_eff exists")
	}

	return count > 0, nil
}

// Activity pointed to by the foreign key.
func (o *LigandEff) Activity(mods ...qm.QueryMod) activityQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"activity_id\" = ?", o.ActivityID),
	}

	queryMods = append(queryMods, mods...)

	return Activities(queryMods...)
}

// LoadActivity allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (ligandEffL) LoadActivity(ctx context.Context, e boil.ContextExecutor, singular bool, maybeLigandEff interface{}, mods queries.Applicator) error {
	var slice []*LigandEff
	var object *LigandEff

	if singular {
		object = maybeLigandEff.(*LigandEff)
	} else {
		slice = *maybeLigandEff.(*[]*LigandEff)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &ligandEffR{}
		}
		args = append(args, object.ActivityID)

	} else {
		// Deduplicate foreign-key values so each parent is fetched once.
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &ligandEffR{}
			}

			for _, a := range args {
				if a == obj.ActivityID {
					continue Outer
				}
			}

			args = append(args, obj.ActivityID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`activities`),
		qm.WhereIn(`activities.activity_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load Activity")
	}

	var resultSlice []*Activity
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice Activity")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for activities")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activities")
	}

	if len(ligandEffAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.Activity = foreign
		if foreign.R == nil {
			foreign.R = &activityR{}
		}
		foreign.R.LigandEff = object
		return nil
	}

	// Wire each local row to its matching parent and set the back-reference.
	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.ActivityID == foreign.ActivityID {
				local.R.Activity = foreign
				if foreign.R == nil {
					foreign.R = &activityR{}
				}
				foreign.R.LigandEff = local
				break
			}
		}
	}

	return nil
}

// SetActivity of the ligandEff to the related item.
// Sets o.R.Activity to related.
// Adds o to related.R.LigandEff.
func (o *LigandEff) SetActivity(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Activity) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"ligand_eff\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"activity_id"}),
		strmangle.WhereClause("\"", "\"", 0, ligandEffPrimaryKeyColumns),
	)
	values := []interface{}{related.ActivityID, o.ActivityID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Keep the in-memory struct and both relationship caches consistent
	// with the database update above.
	o.ActivityID = related.ActivityID
	if o.R == nil {
		o.R = &ligandEffR{
			Activity: related,
		}
	} else {
		o.R.Activity = related
	}

	if related.R == nil {
		related.R = &activityR{
			LigandEff: o,
		}
	} else {
		related.R.LigandEff = o
	}

	return nil
}

// LigandEffs retrieves all the records using an executor.
func LigandEffs(mods ...qm.QueryMod) ligandEffQuery {
	mods = append(mods, qm.From("\"ligand_eff\""))
	q := NewQuery(mods...)
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"ligand_eff\".*"})
	}

	return ligandEffQuery{q}
}

// FindLigandEff retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindLigandEff(ctx context.Context, exec boil.ContextExecutor, activityID int64, selectCols ...string) (*LigandEff, error) {
	ligandEffObj := &LigandEff{}

	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"ligand_eff\" where \"activity_id\"=?", sel,
	)

	q := queries.Raw(query, activityID)

	err := q.Bind(ctx, exec, ligandEffObj)
	if err != nil {
		// sql.ErrNoRows is passed through unwrapped so callers can test for it.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from ligand_eff")
	}

	if err = ligandEffObj.doAfterSelectHooks(ctx, exec); err != nil {
		return ligandEffObj, err
	}

	return ligandEffObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
+func (o *LigandEff) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no ligand_eff provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(ligandEffColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + ligandEffInsertCacheMut.RLock() + cache, cached := ligandEffInsertCache[key] + ligandEffInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + ligandEffAllColumns, + ligandEffColumnsWithDefault, + ligandEffColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(ligandEffType, ligandEffMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(ligandEffType, ligandEffMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"ligand_eff\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"ligand_eff\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into ligand_eff") + } + + if !cached { + ligandEffInsertCacheMut.Lock() + ligandEffInsertCache[key] = cache + ligandEffInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the LigandEff. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *LigandEff) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + ligandEffUpdateCacheMut.RLock() + cache, cached := ligandEffUpdateCache[key] + ligandEffUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + ligandEffAllColumns, + ligandEffPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update ligand_eff, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"ligand_eff\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, ligandEffPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(ligandEffType, ligandEffMapping, append(wl, ligandEffPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update ligand_eff row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for ligand_eff") + } + + if !cached { + ligandEffUpdateCacheMut.Lock() + ligandEffUpdateCache[key] = cache + ligandEffUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q ligandEffQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for ligand_eff") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for ligand_eff") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o LigandEffSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), ligandEffPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"ligand_eff\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, ligandEffPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in ligandEff slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all ligandEff") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *LigandEff) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no ligand_eff provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(ligandEffColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + 
ligandEffUpsertCacheMut.RLock() + cache, cached := ligandEffUpsertCache[key] + ligandEffUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + ligandEffAllColumns, + ligandEffColumnsWithDefault, + ligandEffColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + ligandEffAllColumns, + ligandEffPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert ligand_eff, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(ligandEffPrimaryKeyColumns)) + copy(conflict, ligandEffPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"ligand_eff\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(ligandEffType, ligandEffMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(ligandEffType, ligandEffMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert ligand_eff") + } + + if !cached { + ligandEffUpsertCacheMut.Lock() + ligandEffUpsertCache[key] = cache + ligandEffUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single LigandEff record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *LigandEff) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no LigandEff provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), ligandEffPrimaryKeyMapping) + sql := "DELETE FROM \"ligand_eff\" WHERE \"activity_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from ligand_eff") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for ligand_eff") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q ligandEffQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no ligandEffQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from ligand_eff") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for ligand_eff") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o LigandEffSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(ligandEffBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), ligandEffPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"ligand_eff\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, ligandEffPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from ligandEff slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for ligand_eff") + } + + if len(ligandEffAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *LigandEff) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindLigandEff(ctx, exec, o.ActivityID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *LigandEffSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := LigandEffSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), ligandEffPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"ligand_eff\".* FROM \"ligand_eff\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, ligandEffPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in LigandEffSlice") + } + + *o = slice + + return nil +} + +// LigandEffExists checks if the LigandEff row exists. +func LigandEffExists(ctx context.Context, exec boil.ContextExecutor, activityID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"ligand_eff\" where \"activity_id\"=? 
limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, activityID) + } + row := exec.QueryRowContext(ctx, sql, activityID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if ligand_eff exists") + } + + return exists, nil +} diff --git a/models/mechanism_refs.go b/models/mechanism_refs.go new file mode 100644 index 0000000..e9d3dd6 --- /dev/null +++ b/models/mechanism_refs.go @@ -0,0 +1,1084 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// MechanismRef is an object representing the database table. 
+type MechanismRef struct { + MecrefID int64 `boil:"mecref_id" json:"mecref_id" toml:"mecref_id" yaml:"mecref_id"` + MecID int64 `boil:"mec_id" json:"mec_id" toml:"mec_id" yaml:"mec_id"` + RefType string `boil:"ref_type" json:"ref_type" toml:"ref_type" yaml:"ref_type"` + RefID null.String `boil:"ref_id" json:"ref_id,omitempty" toml:"ref_id" yaml:"ref_id,omitempty"` + RefURL null.String `boil:"ref_url" json:"ref_url,omitempty" toml:"ref_url" yaml:"ref_url,omitempty"` + + R *mechanismRefR `boil:"-" json:"-" toml:"-" yaml:"-"` + L mechanismRefL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var MechanismRefColumns = struct { + MecrefID string + MecID string + RefType string + RefID string + RefURL string +}{ + MecrefID: "mecref_id", + MecID: "mec_id", + RefType: "ref_type", + RefID: "ref_id", + RefURL: "ref_url", +} + +var MechanismRefTableColumns = struct { + MecrefID string + MecID string + RefType string + RefID string + RefURL string +}{ + MecrefID: "mechanism_refs.mecref_id", + MecID: "mechanism_refs.mec_id", + RefType: "mechanism_refs.ref_type", + RefID: "mechanism_refs.ref_id", + RefURL: "mechanism_refs.ref_url", +} + +// Generated where + +var MechanismRefWhere = struct { + MecrefID whereHelperint64 + MecID whereHelperint64 + RefType whereHelperstring + RefID whereHelpernull_String + RefURL whereHelpernull_String +}{ + MecrefID: whereHelperint64{field: "\"mechanism_refs\".\"mecref_id\""}, + MecID: whereHelperint64{field: "\"mechanism_refs\".\"mec_id\""}, + RefType: whereHelperstring{field: "\"mechanism_refs\".\"ref_type\""}, + RefID: whereHelpernull_String{field: "\"mechanism_refs\".\"ref_id\""}, + RefURL: whereHelpernull_String{field: "\"mechanism_refs\".\"ref_url\""}, +} + +// MechanismRefRels is where relationship names are stored. +var MechanismRefRels = struct { + Mec string +}{ + Mec: "Mec", +} + +// mechanismRefR is where relationships are stored. 
+type mechanismRefR struct { + Mec *DrugMechanism `boil:"Mec" json:"Mec" toml:"Mec" yaml:"Mec"` +} + +// NewStruct creates a new relationship struct +func (*mechanismRefR) NewStruct() *mechanismRefR { + return &mechanismRefR{} +} + +func (r *mechanismRefR) GetMec() *DrugMechanism { + if r == nil { + return nil + } + return r.Mec +} + +// mechanismRefL is where Load methods for each relationship are stored. +type mechanismRefL struct{} + +var ( + mechanismRefAllColumns = []string{"mecref_id", "mec_id", "ref_type", "ref_id", "ref_url"} + mechanismRefColumnsWithoutDefault = []string{"mecref_id", "mec_id", "ref_type"} + mechanismRefColumnsWithDefault = []string{"ref_id", "ref_url"} + mechanismRefPrimaryKeyColumns = []string{"mecref_id"} + mechanismRefGeneratedColumns = []string{} +) + +type ( + // MechanismRefSlice is an alias for a slice of pointers to MechanismRef. + // This should almost always be used instead of []MechanismRef. + MechanismRefSlice []*MechanismRef + // MechanismRefHook is the signature for custom MechanismRef hook methods + MechanismRefHook func(context.Context, boil.ContextExecutor, *MechanismRef) error + + mechanismRefQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + mechanismRefType = reflect.TypeOf(&MechanismRef{}) + mechanismRefMapping = queries.MakeStructMapping(mechanismRefType) + mechanismRefPrimaryKeyMapping, _ = queries.BindMapping(mechanismRefType, mechanismRefMapping, mechanismRefPrimaryKeyColumns) + mechanismRefInsertCacheMut sync.RWMutex + mechanismRefInsertCache = make(map[string]insertCache) + mechanismRefUpdateCacheMut sync.RWMutex + mechanismRefUpdateCache = make(map[string]updateCache) + mechanismRefUpsertCacheMut sync.RWMutex + mechanismRefUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var mechanismRefAfterSelectHooks []MechanismRefHook + +var mechanismRefBeforeInsertHooks []MechanismRefHook +var mechanismRefAfterInsertHooks []MechanismRefHook + +var mechanismRefBeforeUpdateHooks []MechanismRefHook +var mechanismRefAfterUpdateHooks []MechanismRefHook + +var mechanismRefBeforeDeleteHooks []MechanismRefHook +var mechanismRefAfterDeleteHooks []MechanismRefHook + +var mechanismRefBeforeUpsertHooks []MechanismRefHook +var mechanismRefAfterUpsertHooks []MechanismRefHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *MechanismRef) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range mechanismRefAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *MechanismRef) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range mechanismRefBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *MechanismRef) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range mechanismRefAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *MechanismRef) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range mechanismRefBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *MechanismRef) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range mechanismRefAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *MechanismRef) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range mechanismRefBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *MechanismRef) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range mechanismRefAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *MechanismRef) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range mechanismRefBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *MechanismRef) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range mechanismRefAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddMechanismRefHook registers your hook function for all future operations. +func AddMechanismRefHook(hookPoint boil.HookPoint, mechanismRefHook MechanismRefHook) { + switch hookPoint { + case boil.AfterSelectHook: + mechanismRefAfterSelectHooks = append(mechanismRefAfterSelectHooks, mechanismRefHook) + case boil.BeforeInsertHook: + mechanismRefBeforeInsertHooks = append(mechanismRefBeforeInsertHooks, mechanismRefHook) + case boil.AfterInsertHook: + mechanismRefAfterInsertHooks = append(mechanismRefAfterInsertHooks, mechanismRefHook) + case boil.BeforeUpdateHook: + mechanismRefBeforeUpdateHooks = append(mechanismRefBeforeUpdateHooks, mechanismRefHook) + case boil.AfterUpdateHook: + mechanismRefAfterUpdateHooks = append(mechanismRefAfterUpdateHooks, mechanismRefHook) + case boil.BeforeDeleteHook: + mechanismRefBeforeDeleteHooks = append(mechanismRefBeforeDeleteHooks, mechanismRefHook) + case boil.AfterDeleteHook: + mechanismRefAfterDeleteHooks = append(mechanismRefAfterDeleteHooks, mechanismRefHook) + case boil.BeforeUpsertHook: + mechanismRefBeforeUpsertHooks = append(mechanismRefBeforeUpsertHooks, mechanismRefHook) + case boil.AfterUpsertHook: + mechanismRefAfterUpsertHooks = append(mechanismRefAfterUpsertHooks, mechanismRefHook) + } +} + +// One returns a single mechanismRef record from the query. 
+func (q mechanismRefQuery) One(ctx context.Context, exec boil.ContextExecutor) (*MechanismRef, error) { + o := &MechanismRef{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for mechanism_refs") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all MechanismRef records from the query. +func (q mechanismRefQuery) All(ctx context.Context, exec boil.ContextExecutor) (MechanismRefSlice, error) { + var o []*MechanismRef + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to MechanismRef slice") + } + + if len(mechanismRefAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all MechanismRef records in the query. +func (q mechanismRefQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count mechanism_refs rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q mechanismRefQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if mechanism_refs exists") + } + + return count > 0, nil +} + +// Mec pointed to by the foreign key. 
+func (o *MechanismRef) Mec(mods ...qm.QueryMod) drugMechanismQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"mec_id\" = ?", o.MecID), + } + + queryMods = append(queryMods, mods...) + + return DrugMechanisms(queryMods...) +} + +// LoadMec allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (mechanismRefL) LoadMec(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMechanismRef interface{}, mods queries.Applicator) error { + var slice []*MechanismRef + var object *MechanismRef + + if singular { + object = maybeMechanismRef.(*MechanismRef) + } else { + slice = *maybeMechanismRef.(*[]*MechanismRef) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &mechanismRefR{} + } + args = append(args, object.MecID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &mechanismRefR{} + } + + for _, a := range args { + if a == obj.MecID { + continue Outer + } + } + + args = append(args, obj.MecID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`drug_mechanism`), + qm.WhereIn(`drug_mechanism.mec_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load DrugMechanism") + } + + var resultSlice []*DrugMechanism + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice DrugMechanism") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for drug_mechanism") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for drug_mechanism") + } + + if len(mechanismRefAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + 
return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Mec = foreign + if foreign.R == nil { + foreign.R = &drugMechanismR{} + } + foreign.R.MecMechanismRefs = append(foreign.R.MecMechanismRefs, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.MecID == foreign.MecID { + local.R.Mec = foreign + if foreign.R == nil { + foreign.R = &drugMechanismR{} + } + foreign.R.MecMechanismRefs = append(foreign.R.MecMechanismRefs, local) + break + } + } + } + + return nil +} + +// SetMec of the mechanismRef to the related item. +// Sets o.R.Mec to related. +// Adds o to related.R.MecMechanismRefs. +func (o *MechanismRef) SetMec(ctx context.Context, exec boil.ContextExecutor, insert bool, related *DrugMechanism) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"mechanism_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"mec_id"}), + strmangle.WhereClause("\"", "\"", 0, mechanismRefPrimaryKeyColumns), + ) + values := []interface{}{related.MecID, o.MecrefID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.MecID = related.MecID + if o.R == nil { + o.R = &mechanismRefR{ + Mec: related, + } + } else { + o.R.Mec = related + } + + if related.R == nil { + related.R = &drugMechanismR{ + MecMechanismRefs: MechanismRefSlice{o}, + } + } else { + related.R.MecMechanismRefs = append(related.R.MecMechanismRefs, o) + } + + return nil +} + +// MechanismRefs retrieves all the records using an executor. 
+func MechanismRefs(mods ...qm.QueryMod) mechanismRefQuery { + mods = append(mods, qm.From("\"mechanism_refs\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"mechanism_refs\".*"}) + } + + return mechanismRefQuery{q} +} + +// FindMechanismRef retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindMechanismRef(ctx context.Context, exec boil.ContextExecutor, mecrefID int64, selectCols ...string) (*MechanismRef, error) { + mechanismRefObj := &MechanismRef{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"mechanism_refs\" where \"mecref_id\"=?", sel, + ) + + q := queries.Raw(query, mecrefID) + + err := q.Bind(ctx, exec, mechanismRefObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from mechanism_refs") + } + + if err = mechanismRefObj.doAfterSelectHooks(ctx, exec); err != nil { + return mechanismRefObj, err + } + + return mechanismRefObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *MechanismRef) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no mechanism_refs provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(mechanismRefColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + mechanismRefInsertCacheMut.RLock() + cache, cached := mechanismRefInsertCache[key] + mechanismRefInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + mechanismRefAllColumns, + mechanismRefColumnsWithDefault, + mechanismRefColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(mechanismRefType, mechanismRefMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(mechanismRefType, mechanismRefMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"mechanism_refs\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"mechanism_refs\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into mechanism_refs") + } + + if !cached { + mechanismRefInsertCacheMut.Lock() + mechanismRefInsertCache[key] = cache + mechanismRefInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the MechanismRef. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *MechanismRef) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + mechanismRefUpdateCacheMut.RLock() + cache, cached := mechanismRefUpdateCache[key] + mechanismRefUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + mechanismRefAllColumns, + mechanismRefPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update mechanism_refs, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"mechanism_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, mechanismRefPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(mechanismRefType, mechanismRefMapping, append(wl, mechanismRefPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update mechanism_refs row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for mechanism_refs") + } + + if !cached { + mechanismRefUpdateCacheMut.Lock() + mechanismRefUpdateCache[key] = cache + mechanismRefUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q mechanismRefQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for mechanism_refs") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for mechanism_refs") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o MechanismRefSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), mechanismRefPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"mechanism_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, mechanismRefPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in mechanismRef slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all mechanismRef") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *MechanismRef) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no mechanism_refs provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(mechanismRefColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + 
mechanismRefUpsertCacheMut.RLock() + cache, cached := mechanismRefUpsertCache[key] + mechanismRefUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + mechanismRefAllColumns, + mechanismRefColumnsWithDefault, + mechanismRefColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + mechanismRefAllColumns, + mechanismRefPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert mechanism_refs, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(mechanismRefPrimaryKeyColumns)) + copy(conflict, mechanismRefPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"mechanism_refs\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(mechanismRefType, mechanismRefMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(mechanismRefType, mechanismRefMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert mechanism_refs") + } + + if !cached { + mechanismRefUpsertCacheMut.Lock() + mechanismRefUpsertCache[key] = cache + mechanismRefUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single MechanismRef record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *MechanismRef) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no MechanismRef provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), mechanismRefPrimaryKeyMapping) + sql := "DELETE FROM \"mechanism_refs\" WHERE \"mecref_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from mechanism_refs") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for mechanism_refs") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q mechanismRefQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no mechanismRefQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from mechanism_refs") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for mechanism_refs") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o MechanismRefSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(mechanismRefBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), mechanismRefPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"mechanism_refs\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, mechanismRefPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from mechanismRef slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for mechanism_refs") + } + + if len(mechanismRefAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *MechanismRef) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindMechanismRef(ctx, exec, o.MecrefID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *MechanismRefSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := MechanismRefSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), mechanismRefPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"mechanism_refs\".* FROM \"mechanism_refs\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, mechanismRefPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in MechanismRefSlice") + } + + *o = slice + + return nil +} + +// MechanismRefExists checks if the MechanismRef row exists. +func MechanismRefExists(ctx context.Context, exec boil.ContextExecutor, mecrefID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"mechanism_refs\" where \"mecref_id\"=? 
limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, mecrefID) + } + row := exec.QueryRowContext(ctx, sql, mecrefID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if mechanism_refs exists") + } + + return exists, nil +} diff --git a/models/metabolism.go b/models/metabolism.go new file mode 100644 index 0000000..7afdea8 --- /dev/null +++ b/models/metabolism.go @@ -0,0 +1,1972 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Metabolism is an object representing the database table. 
+type Metabolism struct { + MetID int64 `boil:"met_id" json:"met_id" toml:"met_id" yaml:"met_id"` + DrugRecordID null.Int64 `boil:"drug_record_id" json:"drug_record_id,omitempty" toml:"drug_record_id" yaml:"drug_record_id,omitempty"` + SubstrateRecordID null.Int64 `boil:"substrate_record_id" json:"substrate_record_id,omitempty" toml:"substrate_record_id" yaml:"substrate_record_id,omitempty"` + MetaboliteRecordID null.Int64 `boil:"metabolite_record_id" json:"metabolite_record_id,omitempty" toml:"metabolite_record_id" yaml:"metabolite_record_id,omitempty"` + PathwayID null.Int64 `boil:"pathway_id" json:"pathway_id,omitempty" toml:"pathway_id" yaml:"pathway_id,omitempty"` + PathwayKey null.String `boil:"pathway_key" json:"pathway_key,omitempty" toml:"pathway_key" yaml:"pathway_key,omitempty"` + EnzymeName null.String `boil:"enzyme_name" json:"enzyme_name,omitempty" toml:"enzyme_name" yaml:"enzyme_name,omitempty"` + EnzymeTid null.Int64 `boil:"enzyme_tid" json:"enzyme_tid,omitempty" toml:"enzyme_tid" yaml:"enzyme_tid,omitempty"` + MetConversion null.String `boil:"met_conversion" json:"met_conversion,omitempty" toml:"met_conversion" yaml:"met_conversion,omitempty"` + Organism null.String `boil:"organism" json:"organism,omitempty" toml:"organism" yaml:"organism,omitempty"` + TaxID null.Int64 `boil:"tax_id" json:"tax_id,omitempty" toml:"tax_id" yaml:"tax_id,omitempty"` + MetComment null.String `boil:"met_comment" json:"met_comment,omitempty" toml:"met_comment" yaml:"met_comment,omitempty"` + + R *metabolismR `boil:"-" json:"-" toml:"-" yaml:"-"` + L metabolismL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var MetabolismColumns = struct { + MetID string + DrugRecordID string + SubstrateRecordID string + MetaboliteRecordID string + PathwayID string + PathwayKey string + EnzymeName string + EnzymeTid string + MetConversion string + Organism string + TaxID string + MetComment string +}{ + MetID: "met_id", + DrugRecordID: "drug_record_id", + SubstrateRecordID: 
"substrate_record_id", + MetaboliteRecordID: "metabolite_record_id", + PathwayID: "pathway_id", + PathwayKey: "pathway_key", + EnzymeName: "enzyme_name", + EnzymeTid: "enzyme_tid", + MetConversion: "met_conversion", + Organism: "organism", + TaxID: "tax_id", + MetComment: "met_comment", +} + +var MetabolismTableColumns = struct { + MetID string + DrugRecordID string + SubstrateRecordID string + MetaboliteRecordID string + PathwayID string + PathwayKey string + EnzymeName string + EnzymeTid string + MetConversion string + Organism string + TaxID string + MetComment string +}{ + MetID: "metabolism.met_id", + DrugRecordID: "metabolism.drug_record_id", + SubstrateRecordID: "metabolism.substrate_record_id", + MetaboliteRecordID: "metabolism.metabolite_record_id", + PathwayID: "metabolism.pathway_id", + PathwayKey: "metabolism.pathway_key", + EnzymeName: "metabolism.enzyme_name", + EnzymeTid: "metabolism.enzyme_tid", + MetConversion: "metabolism.met_conversion", + Organism: "metabolism.organism", + TaxID: "metabolism.tax_id", + MetComment: "metabolism.met_comment", +} + +// Generated where + +var MetabolismWhere = struct { + MetID whereHelperint64 + DrugRecordID whereHelpernull_Int64 + SubstrateRecordID whereHelpernull_Int64 + MetaboliteRecordID whereHelpernull_Int64 + PathwayID whereHelpernull_Int64 + PathwayKey whereHelpernull_String + EnzymeName whereHelpernull_String + EnzymeTid whereHelpernull_Int64 + MetConversion whereHelpernull_String + Organism whereHelpernull_String + TaxID whereHelpernull_Int64 + MetComment whereHelpernull_String +}{ + MetID: whereHelperint64{field: "\"metabolism\".\"met_id\""}, + DrugRecordID: whereHelpernull_Int64{field: "\"metabolism\".\"drug_record_id\""}, + SubstrateRecordID: whereHelpernull_Int64{field: "\"metabolism\".\"substrate_record_id\""}, + MetaboliteRecordID: whereHelpernull_Int64{field: "\"metabolism\".\"metabolite_record_id\""}, + PathwayID: whereHelpernull_Int64{field: "\"metabolism\".\"pathway_id\""}, + PathwayKey: 
whereHelpernull_String{field: "\"metabolism\".\"pathway_key\""}, + EnzymeName: whereHelpernull_String{field: "\"metabolism\".\"enzyme_name\""}, + EnzymeTid: whereHelpernull_Int64{field: "\"metabolism\".\"enzyme_tid\""}, + MetConversion: whereHelpernull_String{field: "\"metabolism\".\"met_conversion\""}, + Organism: whereHelpernull_String{field: "\"metabolism\".\"organism\""}, + TaxID: whereHelpernull_Int64{field: "\"metabolism\".\"tax_id\""}, + MetComment: whereHelpernull_String{field: "\"metabolism\".\"met_comment\""}, +} + +// MetabolismRels is where relationship names are stored. +var MetabolismRels = struct { + EnzymeTidTargetDictionary string + SubstrateRecord string + MetaboliteRecord string + DrugRecord string + MetMetabolismRefs string +}{ + EnzymeTidTargetDictionary: "EnzymeTidTargetDictionary", + SubstrateRecord: "SubstrateRecord", + MetaboliteRecord: "MetaboliteRecord", + DrugRecord: "DrugRecord", + MetMetabolismRefs: "MetMetabolismRefs", +} + +// metabolismR is where relationships are stored. 
+type metabolismR struct { + EnzymeTidTargetDictionary *TargetDictionary `boil:"EnzymeTidTargetDictionary" json:"EnzymeTidTargetDictionary" toml:"EnzymeTidTargetDictionary" yaml:"EnzymeTidTargetDictionary"` + SubstrateRecord *CompoundRecord `boil:"SubstrateRecord" json:"SubstrateRecord" toml:"SubstrateRecord" yaml:"SubstrateRecord"` + MetaboliteRecord *CompoundRecord `boil:"MetaboliteRecord" json:"MetaboliteRecord" toml:"MetaboliteRecord" yaml:"MetaboliteRecord"` + DrugRecord *CompoundRecord `boil:"DrugRecord" json:"DrugRecord" toml:"DrugRecord" yaml:"DrugRecord"` + MetMetabolismRefs MetabolismRefSlice `boil:"MetMetabolismRefs" json:"MetMetabolismRefs" toml:"MetMetabolismRefs" yaml:"MetMetabolismRefs"` +} + +// NewStruct creates a new relationship struct +func (*metabolismR) NewStruct() *metabolismR { + return &metabolismR{} +} + +func (r *metabolismR) GetEnzymeTidTargetDictionary() *TargetDictionary { + if r == nil { + return nil + } + return r.EnzymeTidTargetDictionary +} + +func (r *metabolismR) GetSubstrateRecord() *CompoundRecord { + if r == nil { + return nil + } + return r.SubstrateRecord +} + +func (r *metabolismR) GetMetaboliteRecord() *CompoundRecord { + if r == nil { + return nil + } + return r.MetaboliteRecord +} + +func (r *metabolismR) GetDrugRecord() *CompoundRecord { + if r == nil { + return nil + } + return r.DrugRecord +} + +func (r *metabolismR) GetMetMetabolismRefs() MetabolismRefSlice { + if r == nil { + return nil + } + return r.MetMetabolismRefs +} + +// metabolismL is where Load methods for each relationship are stored. 
+type metabolismL struct{} + +var ( + metabolismAllColumns = []string{"met_id", "drug_record_id", "substrate_record_id", "metabolite_record_id", "pathway_id", "pathway_key", "enzyme_name", "enzyme_tid", "met_conversion", "organism", "tax_id", "met_comment"} + metabolismColumnsWithoutDefault = []string{"met_id"} + metabolismColumnsWithDefault = []string{"drug_record_id", "substrate_record_id", "metabolite_record_id", "pathway_id", "pathway_key", "enzyme_name", "enzyme_tid", "met_conversion", "organism", "tax_id", "met_comment"} + metabolismPrimaryKeyColumns = []string{"met_id"} + metabolismGeneratedColumns = []string{} +) + +type ( + // MetabolismSlice is an alias for a slice of pointers to Metabolism. + // This should almost always be used instead of []Metabolism. + MetabolismSlice []*Metabolism + // MetabolismHook is the signature for custom Metabolism hook methods + MetabolismHook func(context.Context, boil.ContextExecutor, *Metabolism) error + + metabolismQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + metabolismType = reflect.TypeOf(&Metabolism{}) + metabolismMapping = queries.MakeStructMapping(metabolismType) + metabolismPrimaryKeyMapping, _ = queries.BindMapping(metabolismType, metabolismMapping, metabolismPrimaryKeyColumns) + metabolismInsertCacheMut sync.RWMutex + metabolismInsertCache = make(map[string]insertCache) + metabolismUpdateCacheMut sync.RWMutex + metabolismUpdateCache = make(map[string]updateCache) + metabolismUpsertCacheMut sync.RWMutex + metabolismUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var metabolismAfterSelectHooks []MetabolismHook + +var metabolismBeforeInsertHooks []MetabolismHook +var metabolismAfterInsertHooks []MetabolismHook + +var metabolismBeforeUpdateHooks []MetabolismHook +var metabolismAfterUpdateHooks []MetabolismHook + +var metabolismBeforeDeleteHooks []MetabolismHook +var metabolismAfterDeleteHooks []MetabolismHook + +var metabolismBeforeUpsertHooks []MetabolismHook +var metabolismAfterUpsertHooks []MetabolismHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *Metabolism) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *Metabolism) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Metabolism) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *Metabolism) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Metabolism) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Metabolism) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *Metabolism) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Metabolism) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *Metabolism) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddMetabolismHook registers your hook function for all future operations. +func AddMetabolismHook(hookPoint boil.HookPoint, metabolismHook MetabolismHook) { + switch hookPoint { + case boil.AfterSelectHook: + metabolismAfterSelectHooks = append(metabolismAfterSelectHooks, metabolismHook) + case boil.BeforeInsertHook: + metabolismBeforeInsertHooks = append(metabolismBeforeInsertHooks, metabolismHook) + case boil.AfterInsertHook: + metabolismAfterInsertHooks = append(metabolismAfterInsertHooks, metabolismHook) + case boil.BeforeUpdateHook: + metabolismBeforeUpdateHooks = append(metabolismBeforeUpdateHooks, metabolismHook) + case boil.AfterUpdateHook: + metabolismAfterUpdateHooks = append(metabolismAfterUpdateHooks, metabolismHook) + case boil.BeforeDeleteHook: + metabolismBeforeDeleteHooks = append(metabolismBeforeDeleteHooks, metabolismHook) + case boil.AfterDeleteHook: + metabolismAfterDeleteHooks = append(metabolismAfterDeleteHooks, metabolismHook) + case boil.BeforeUpsertHook: + metabolismBeforeUpsertHooks = append(metabolismBeforeUpsertHooks, metabolismHook) + case boil.AfterUpsertHook: + metabolismAfterUpsertHooks = append(metabolismAfterUpsertHooks, metabolismHook) + } +} + +// One returns a single metabolism record from the query. 
+func (q metabolismQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Metabolism, error) { + o := &Metabolism{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for metabolism") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Metabolism records from the query. +func (q metabolismQuery) All(ctx context.Context, exec boil.ContextExecutor) (MetabolismSlice, error) { + var o []*Metabolism + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to Metabolism slice") + } + + if len(metabolismAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Metabolism records in the query. +func (q metabolismQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count metabolism rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q metabolismQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if metabolism exists") + } + + return count > 0, nil +} + +// EnzymeTidTargetDictionary pointed to by the foreign key. 
+func (o *Metabolism) EnzymeTidTargetDictionary(mods ...qm.QueryMod) targetDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"tid\" = ?", o.EnzymeTid), + } + + queryMods = append(queryMods, mods...) + + return TargetDictionaries(queryMods...) +} + +// SubstrateRecord pointed to by the foreign key. +func (o *Metabolism) SubstrateRecord(mods ...qm.QueryMod) compoundRecordQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"record_id\" = ?", o.SubstrateRecordID), + } + + queryMods = append(queryMods, mods...) + + return CompoundRecords(queryMods...) +} + +// MetaboliteRecord pointed to by the foreign key. +func (o *Metabolism) MetaboliteRecord(mods ...qm.QueryMod) compoundRecordQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"record_id\" = ?", o.MetaboliteRecordID), + } + + queryMods = append(queryMods, mods...) + + return CompoundRecords(queryMods...) +} + +// DrugRecord pointed to by the foreign key. +func (o *Metabolism) DrugRecord(mods ...qm.QueryMod) compoundRecordQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"record_id\" = ?", o.DrugRecordID), + } + + queryMods = append(queryMods, mods...) + + return CompoundRecords(queryMods...) +} + +// MetMetabolismRefs retrieves all the metabolism_ref's MetabolismRefs with an executor via met_id column. +func (o *Metabolism) MetMetabolismRefs(mods ...qm.QueryMod) metabolismRefQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"metabolism_refs\".\"met_id\"=?", o.MetID), + ) + + return MetabolismRefs(queryMods...) +} + +// LoadEnzymeTidTargetDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (metabolismL) LoadEnzymeTidTargetDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMetabolism interface{}, mods queries.Applicator) error { + var slice []*Metabolism + var object *Metabolism + + if singular { + object = maybeMetabolism.(*Metabolism) + } else { + slice = *maybeMetabolism.(*[]*Metabolism) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &metabolismR{} + } + if !queries.IsNil(object.EnzymeTid) { + args = append(args, object.EnzymeTid) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &metabolismR{} + } + + for _, a := range args { + if queries.Equal(a, obj.EnzymeTid) { + continue Outer + } + } + + if !queries.IsNil(obj.EnzymeTid) { + args = append(args, obj.EnzymeTid) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`target_dictionary`), + qm.WhereIn(`target_dictionary.tid in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load TargetDictionary") + } + + var resultSlice []*TargetDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice TargetDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for target_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for target_dictionary") + } + + if len(metabolismAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.EnzymeTidTargetDictionary = foreign + if foreign.R == nil { + foreign.R = &targetDictionaryR{} + } + 
foreign.R.EnzymeTidMetabolisms = append(foreign.R.EnzymeTidMetabolisms, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.EnzymeTid, foreign.Tid) { + local.R.EnzymeTidTargetDictionary = foreign + if foreign.R == nil { + foreign.R = &targetDictionaryR{} + } + foreign.R.EnzymeTidMetabolisms = append(foreign.R.EnzymeTidMetabolisms, local) + break + } + } + } + + return nil +} + +// LoadSubstrateRecord allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (metabolismL) LoadSubstrateRecord(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMetabolism interface{}, mods queries.Applicator) error { + var slice []*Metabolism + var object *Metabolism + + if singular { + object = maybeMetabolism.(*Metabolism) + } else { + slice = *maybeMetabolism.(*[]*Metabolism) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &metabolismR{} + } + if !queries.IsNil(object.SubstrateRecordID) { + args = append(args, object.SubstrateRecordID) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &metabolismR{} + } + + for _, a := range args { + if queries.Equal(a, obj.SubstrateRecordID) { + continue Outer + } + } + + if !queries.IsNil(obj.SubstrateRecordID) { + args = append(args, obj.SubstrateRecordID) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`compound_records`), + qm.WhereIn(`compound_records.record_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load CompoundRecord") + } + + var resultSlice []*CompoundRecord + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice CompoundRecord") + } + + if err = results.Close(); err != nil { 
+ return errors.Wrap(err, "failed to close results of eager load for compound_records") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound_records") + } + + if len(metabolismAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.SubstrateRecord = foreign + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.SubstrateRecordMetabolisms = append(foreign.R.SubstrateRecordMetabolisms, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.SubstrateRecordID, foreign.RecordID) { + local.R.SubstrateRecord = foreign + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.SubstrateRecordMetabolisms = append(foreign.R.SubstrateRecordMetabolisms, local) + break + } + } + } + + return nil +} + +// LoadMetaboliteRecord allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (metabolismL) LoadMetaboliteRecord(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMetabolism interface{}, mods queries.Applicator) error { + var slice []*Metabolism + var object *Metabolism + + if singular { + object = maybeMetabolism.(*Metabolism) + } else { + slice = *maybeMetabolism.(*[]*Metabolism) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &metabolismR{} + } + if !queries.IsNil(object.MetaboliteRecordID) { + args = append(args, object.MetaboliteRecordID) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &metabolismR{} + } + + for _, a := range args { + if queries.Equal(a, obj.MetaboliteRecordID) { + continue Outer + } + } + + if !queries.IsNil(obj.MetaboliteRecordID) { + args = append(args, obj.MetaboliteRecordID) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`compound_records`), + qm.WhereIn(`compound_records.record_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load CompoundRecord") + } + + var resultSlice []*CompoundRecord + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice CompoundRecord") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for compound_records") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound_records") + } + + if len(metabolismAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.MetaboliteRecord = foreign + if foreign.R == nil { + foreign.R = 
&compoundRecordR{} + } + foreign.R.MetaboliteRecordMetabolisms = append(foreign.R.MetaboliteRecordMetabolisms, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.MetaboliteRecordID, foreign.RecordID) { + local.R.MetaboliteRecord = foreign + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.MetaboliteRecordMetabolisms = append(foreign.R.MetaboliteRecordMetabolisms, local) + break + } + } + } + + return nil +} + +// LoadDrugRecord allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (metabolismL) LoadDrugRecord(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMetabolism interface{}, mods queries.Applicator) error { + var slice []*Metabolism + var object *Metabolism + + if singular { + object = maybeMetabolism.(*Metabolism) + } else { + slice = *maybeMetabolism.(*[]*Metabolism) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &metabolismR{} + } + if !queries.IsNil(object.DrugRecordID) { + args = append(args, object.DrugRecordID) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &metabolismR{} + } + + for _, a := range args { + if queries.Equal(a, obj.DrugRecordID) { + continue Outer + } + } + + if !queries.IsNil(obj.DrugRecordID) { + args = append(args, obj.DrugRecordID) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`compound_records`), + qm.WhereIn(`compound_records.record_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load CompoundRecord") + } + + var resultSlice []*CompoundRecord + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice CompoundRecord") + } + + if err = 
results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for compound_records") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound_records") + } + + if len(metabolismAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.DrugRecord = foreign + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.DrugRecordMetabolisms = append(foreign.R.DrugRecordMetabolisms, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.DrugRecordID, foreign.RecordID) { + local.R.DrugRecord = foreign + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.DrugRecordMetabolisms = append(foreign.R.DrugRecordMetabolisms, local) + break + } + } + } + + return nil +} + +// LoadMetMetabolismRefs allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (metabolismL) LoadMetMetabolismRefs(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMetabolism interface{}, mods queries.Applicator) error { + var slice []*Metabolism + var object *Metabolism + + if singular { + object = maybeMetabolism.(*Metabolism) + } else { + slice = *maybeMetabolism.(*[]*Metabolism) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &metabolismR{} + } + args = append(args, object.MetID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &metabolismR{} + } + + for _, a := range args { + if a == obj.MetID { + continue Outer + } + } + + args = append(args, obj.MetID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`metabolism_refs`), + qm.WhereIn(`metabolism_refs.met_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load metabolism_refs") + } + + var resultSlice []*MetabolismRef + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice metabolism_refs") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on metabolism_refs") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for metabolism_refs") + } + + if len(metabolismRefAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.MetMetabolismRefs = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &metabolismRefR{} + } + foreign.R.Met = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.MetID == foreign.MetID { + local.R.MetMetabolismRefs 
= append(local.R.MetMetabolismRefs, foreign) + if foreign.R == nil { + foreign.R = &metabolismRefR{} + } + foreign.R.Met = local + break + } + } + } + + return nil +} + +// SetEnzymeTidTargetDictionary of the metabolism to the related item. +// Sets o.R.EnzymeTidTargetDictionary to related. +// Adds o to related.R.EnzymeTidMetabolisms. +func (o *Metabolism) SetEnzymeTidTargetDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *TargetDictionary) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"metabolism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"enzyme_tid"}), + strmangle.WhereClause("\"", "\"", 0, metabolismPrimaryKeyColumns), + ) + values := []interface{}{related.Tid, o.MetID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.EnzymeTid, related.Tid) + if o.R == nil { + o.R = &metabolismR{ + EnzymeTidTargetDictionary: related, + } + } else { + o.R.EnzymeTidTargetDictionary = related + } + + if related.R == nil { + related.R = &targetDictionaryR{ + EnzymeTidMetabolisms: MetabolismSlice{o}, + } + } else { + related.R.EnzymeTidMetabolisms = append(related.R.EnzymeTidMetabolisms, o) + } + + return nil +} + +// RemoveEnzymeTidTargetDictionary relationship. +// Sets o.R.EnzymeTidTargetDictionary to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *Metabolism) RemoveEnzymeTidTargetDictionary(ctx context.Context, exec boil.ContextExecutor, related *TargetDictionary) error { + var err error + + queries.SetScanner(&o.EnzymeTid, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("enzyme_tid")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.EnzymeTidTargetDictionary = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.EnzymeTidMetabolisms { + if queries.Equal(o.EnzymeTid, ri.EnzymeTid) { + continue + } + + ln := len(related.R.EnzymeTidMetabolisms) + if ln > 1 && i < ln-1 { + related.R.EnzymeTidMetabolisms[i] = related.R.EnzymeTidMetabolisms[ln-1] + } + related.R.EnzymeTidMetabolisms = related.R.EnzymeTidMetabolisms[:ln-1] + break + } + return nil +} + +// SetSubstrateRecord of the metabolism to the related item. +// Sets o.R.SubstrateRecord to related. +// Adds o to related.R.SubstrateRecordMetabolisms. +func (o *Metabolism) SetSubstrateRecord(ctx context.Context, exec boil.ContextExecutor, insert bool, related *CompoundRecord) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"metabolism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"substrate_record_id"}), + strmangle.WhereClause("\"", "\"", 0, metabolismPrimaryKeyColumns), + ) + values := []interface{}{related.RecordID, o.MetID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.SubstrateRecordID, related.RecordID) + if o.R == nil { + o.R = &metabolismR{ + SubstrateRecord: related, + } + } else { + o.R.SubstrateRecord 
= related + } + + if related.R == nil { + related.R = &compoundRecordR{ + SubstrateRecordMetabolisms: MetabolismSlice{o}, + } + } else { + related.R.SubstrateRecordMetabolisms = append(related.R.SubstrateRecordMetabolisms, o) + } + + return nil +} + +// RemoveSubstrateRecord relationship. +// Sets o.R.SubstrateRecord to nil. +// Removes o from all passed in related items' relationships struct. +func (o *Metabolism) RemoveSubstrateRecord(ctx context.Context, exec boil.ContextExecutor, related *CompoundRecord) error { + var err error + + queries.SetScanner(&o.SubstrateRecordID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("substrate_record_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.SubstrateRecord = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.SubstrateRecordMetabolisms { + if queries.Equal(o.SubstrateRecordID, ri.SubstrateRecordID) { + continue + } + + ln := len(related.R.SubstrateRecordMetabolisms) + if ln > 1 && i < ln-1 { + related.R.SubstrateRecordMetabolisms[i] = related.R.SubstrateRecordMetabolisms[ln-1] + } + related.R.SubstrateRecordMetabolisms = related.R.SubstrateRecordMetabolisms[:ln-1] + break + } + return nil +} + +// SetMetaboliteRecord of the metabolism to the related item. +// Sets o.R.MetaboliteRecord to related. +// Adds o to related.R.MetaboliteRecordMetabolisms. 
+func (o *Metabolism) SetMetaboliteRecord(ctx context.Context, exec boil.ContextExecutor, insert bool, related *CompoundRecord) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"metabolism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"metabolite_record_id"}), + strmangle.WhereClause("\"", "\"", 0, metabolismPrimaryKeyColumns), + ) + values := []interface{}{related.RecordID, o.MetID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.MetaboliteRecordID, related.RecordID) + if o.R == nil { + o.R = &metabolismR{ + MetaboliteRecord: related, + } + } else { + o.R.MetaboliteRecord = related + } + + if related.R == nil { + related.R = &compoundRecordR{ + MetaboliteRecordMetabolisms: MetabolismSlice{o}, + } + } else { + related.R.MetaboliteRecordMetabolisms = append(related.R.MetaboliteRecordMetabolisms, o) + } + + return nil +} + +// RemoveMetaboliteRecord relationship. +// Sets o.R.MetaboliteRecord to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *Metabolism) RemoveMetaboliteRecord(ctx context.Context, exec boil.ContextExecutor, related *CompoundRecord) error { + var err error + + queries.SetScanner(&o.MetaboliteRecordID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("metabolite_record_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.MetaboliteRecord = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.MetaboliteRecordMetabolisms { + if queries.Equal(o.MetaboliteRecordID, ri.MetaboliteRecordID) { + continue + } + + ln := len(related.R.MetaboliteRecordMetabolisms) + if ln > 1 && i < ln-1 { + related.R.MetaboliteRecordMetabolisms[i] = related.R.MetaboliteRecordMetabolisms[ln-1] + } + related.R.MetaboliteRecordMetabolisms = related.R.MetaboliteRecordMetabolisms[:ln-1] + break + } + return nil +} + +// SetDrugRecord of the metabolism to the related item. +// Sets o.R.DrugRecord to related. +// Adds o to related.R.DrugRecordMetabolisms. 
+func (o *Metabolism) SetDrugRecord(ctx context.Context, exec boil.ContextExecutor, insert bool, related *CompoundRecord) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"metabolism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"drug_record_id"}), + strmangle.WhereClause("\"", "\"", 0, metabolismPrimaryKeyColumns), + ) + values := []interface{}{related.RecordID, o.MetID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.DrugRecordID, related.RecordID) + if o.R == nil { + o.R = &metabolismR{ + DrugRecord: related, + } + } else { + o.R.DrugRecord = related + } + + if related.R == nil { + related.R = &compoundRecordR{ + DrugRecordMetabolisms: MetabolismSlice{o}, + } + } else { + related.R.DrugRecordMetabolisms = append(related.R.DrugRecordMetabolisms, o) + } + + return nil +} + +// RemoveDrugRecord relationship. +// Sets o.R.DrugRecord to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *Metabolism) RemoveDrugRecord(ctx context.Context, exec boil.ContextExecutor, related *CompoundRecord) error { + var err error + + queries.SetScanner(&o.DrugRecordID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("drug_record_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.DrugRecord = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.DrugRecordMetabolisms { + if queries.Equal(o.DrugRecordID, ri.DrugRecordID) { + continue + } + + ln := len(related.R.DrugRecordMetabolisms) + if ln > 1 && i < ln-1 { + related.R.DrugRecordMetabolisms[i] = related.R.DrugRecordMetabolisms[ln-1] + } + related.R.DrugRecordMetabolisms = related.R.DrugRecordMetabolisms[:ln-1] + break + } + return nil +} + +// AddMetMetabolismRefs adds the given related objects to the existing relationships +// of the metabolism, optionally inserting them as new records. +// Appends related to o.R.MetMetabolismRefs. +// Sets related.R.Met appropriately. 
+func (o *Metabolism) AddMetMetabolismRefs(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MetabolismRef) error { + var err error + for _, rel := range related { + if insert { + rel.MetID = o.MetID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"metabolism_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"met_id"}), + strmangle.WhereClause("\"", "\"", 0, metabolismRefPrimaryKeyColumns), + ) + values := []interface{}{o.MetID, rel.MetrefID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.MetID = o.MetID + } + } + + if o.R == nil { + o.R = &metabolismR{ + MetMetabolismRefs: related, + } + } else { + o.R.MetMetabolismRefs = append(o.R.MetMetabolismRefs, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &metabolismRefR{ + Met: o, + } + } else { + rel.R.Met = o + } + } + return nil +} + +// Metabolisms retrieves all the records using an executor. +func Metabolisms(mods ...qm.QueryMod) metabolismQuery { + mods = append(mods, qm.From("\"metabolism\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"metabolism\".*"}) + } + + return metabolismQuery{q} +} + +// FindMetabolism retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindMetabolism(ctx context.Context, exec boil.ContextExecutor, metID int64, selectCols ...string) (*Metabolism, error) { + metabolismObj := &Metabolism{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"metabolism\" where \"met_id\"=?", sel, + ) + + q := queries.Raw(query, metID) + + err := q.Bind(ctx, exec, metabolismObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from metabolism") + } + + if err = metabolismObj.doAfterSelectHooks(ctx, exec); err != nil { + return metabolismObj, err + } + + return metabolismObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *Metabolism) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no metabolism provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(metabolismColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + metabolismInsertCacheMut.RLock() + cache, cached := metabolismInsertCache[key] + metabolismInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + metabolismAllColumns, + metabolismColumnsWithDefault, + metabolismColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(metabolismType, metabolismMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(metabolismType, metabolismMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"metabolism\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, 
"\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"metabolism\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into metabolism") + } + + if !cached { + metabolismInsertCacheMut.Lock() + metabolismInsertCache[key] = cache + metabolismInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the Metabolism. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *Metabolism) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + metabolismUpdateCacheMut.RLock() + cache, cached := metabolismUpdateCache[key] + metabolismUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + metabolismAllColumns, + metabolismPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update metabolism, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"metabolism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, metabolismPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(metabolismType, metabolismMapping, append(wl, metabolismPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update metabolism row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for metabolism") + } + + if !cached { + metabolismUpdateCacheMut.Lock() + metabolismUpdateCache[key] = cache + metabolismUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q metabolismQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for metabolism") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for metabolism") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o MetabolismSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), metabolismPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"metabolism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, metabolismPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in metabolism slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all metabolism") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Metabolism) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no metabolism provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(metabolismColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + metabolismUpsertCacheMut.RLock() + cache, cached := metabolismUpsertCache[key] + metabolismUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + metabolismAllColumns, + metabolismColumnsWithDefault, + metabolismColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + metabolismAllColumns, + metabolismPrimaryKeyColumns, + ) + + if updateOnConflict 
&& len(update) == 0 { + return errors.New("models: unable to upsert metabolism, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(metabolismPrimaryKeyColumns)) + copy(conflict, metabolismPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"metabolism\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(metabolismType, metabolismMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(metabolismType, metabolismMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert metabolism") + } + + if !cached { + metabolismUpsertCacheMut.Lock() + metabolismUpsertCache[key] = cache + metabolismUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Metabolism record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *Metabolism) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no Metabolism provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), metabolismPrimaryKeyMapping) + sql := "DELETE FROM \"metabolism\" WHERE \"met_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from metabolism") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for metabolism") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q metabolismQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no metabolismQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from metabolism") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for metabolism") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o MetabolismSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(metabolismBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), metabolismPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"metabolism\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, metabolismPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from metabolism slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for metabolism") + } + + if len(metabolismAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *Metabolism) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindMetabolism(ctx, exec, o.MetID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *MetabolismSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := MetabolismSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), metabolismPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"metabolism\".* FROM \"metabolism\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, metabolismPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in MetabolismSlice") + } + + *o = slice + + return nil +} + +// MetabolismExists checks if the Metabolism row exists. +func MetabolismExists(ctx context.Context, exec boil.ContextExecutor, metID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"metabolism\" where \"met_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, metID) + } + row := exec.QueryRowContext(ctx, sql, metID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if metabolism exists") + } + + return exists, nil +} diff --git a/models/metabolism_refs.go b/models/metabolism_refs.go new file mode 100644 index 0000000..36ff601 --- /dev/null +++ b/models/metabolism_refs.go @@ -0,0 +1,1084 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// MetabolismRef is an object representing the database table. +type MetabolismRef struct { + MetrefID int64 `boil:"metref_id" json:"metref_id" toml:"metref_id" yaml:"metref_id"` + MetID int64 `boil:"met_id" json:"met_id" toml:"met_id" yaml:"met_id"` + RefType string `boil:"ref_type" json:"ref_type" toml:"ref_type" yaml:"ref_type"` + RefID null.String `boil:"ref_id" json:"ref_id,omitempty" toml:"ref_id" yaml:"ref_id,omitempty"` + RefURL null.String `boil:"ref_url" json:"ref_url,omitempty" toml:"ref_url" yaml:"ref_url,omitempty"` + + R *metabolismRefR `boil:"-" json:"-" toml:"-" yaml:"-"` + L metabolismRefL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var MetabolismRefColumns = struct { + MetrefID string + MetID string + RefType string + RefID string + RefURL string +}{ + MetrefID: "metref_id", + MetID: "met_id", + RefType: "ref_type", + RefID: "ref_id", + RefURL: "ref_url", +} + +var MetabolismRefTableColumns = struct { + MetrefID string + MetID string + RefType string + RefID string + RefURL string +}{ + MetrefID: "metabolism_refs.metref_id", + MetID: "metabolism_refs.met_id", + RefType: "metabolism_refs.ref_type", + RefID: "metabolism_refs.ref_id", + RefURL: "metabolism_refs.ref_url", +} + +// Generated where + +var MetabolismRefWhere = struct { + MetrefID whereHelperint64 + MetID whereHelperint64 + RefType whereHelperstring + RefID whereHelpernull_String + RefURL whereHelpernull_String +}{ + MetrefID: whereHelperint64{field: "\"metabolism_refs\".\"metref_id\""}, + MetID: whereHelperint64{field: "\"metabolism_refs\".\"met_id\""}, 
+ RefType: whereHelperstring{field: "\"metabolism_refs\".\"ref_type\""}, + RefID: whereHelpernull_String{field: "\"metabolism_refs\".\"ref_id\""}, + RefURL: whereHelpernull_String{field: "\"metabolism_refs\".\"ref_url\""}, +} + +// MetabolismRefRels is where relationship names are stored. +var MetabolismRefRels = struct { + Met string +}{ + Met: "Met", +} + +// metabolismRefR is where relationships are stored. +type metabolismRefR struct { + Met *Metabolism `boil:"Met" json:"Met" toml:"Met" yaml:"Met"` +} + +// NewStruct creates a new relationship struct +func (*metabolismRefR) NewStruct() *metabolismRefR { + return &metabolismRefR{} +} + +func (r *metabolismRefR) GetMet() *Metabolism { + if r == nil { + return nil + } + return r.Met +} + +// metabolismRefL is where Load methods for each relationship are stored. +type metabolismRefL struct{} + +var ( + metabolismRefAllColumns = []string{"metref_id", "met_id", "ref_type", "ref_id", "ref_url"} + metabolismRefColumnsWithoutDefault = []string{"metref_id", "met_id", "ref_type"} + metabolismRefColumnsWithDefault = []string{"ref_id", "ref_url"} + metabolismRefPrimaryKeyColumns = []string{"metref_id"} + metabolismRefGeneratedColumns = []string{} +) + +type ( + // MetabolismRefSlice is an alias for a slice of pointers to MetabolismRef. + // This should almost always be used instead of []MetabolismRef. 
+ MetabolismRefSlice []*MetabolismRef + // MetabolismRefHook is the signature for custom MetabolismRef hook methods + MetabolismRefHook func(context.Context, boil.ContextExecutor, *MetabolismRef) error + + metabolismRefQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + metabolismRefType = reflect.TypeOf(&MetabolismRef{}) + metabolismRefMapping = queries.MakeStructMapping(metabolismRefType) + metabolismRefPrimaryKeyMapping, _ = queries.BindMapping(metabolismRefType, metabolismRefMapping, metabolismRefPrimaryKeyColumns) + metabolismRefInsertCacheMut sync.RWMutex + metabolismRefInsertCache = make(map[string]insertCache) + metabolismRefUpdateCacheMut sync.RWMutex + metabolismRefUpdateCache = make(map[string]updateCache) + metabolismRefUpsertCacheMut sync.RWMutex + metabolismRefUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var metabolismRefAfterSelectHooks []MetabolismRefHook + +var metabolismRefBeforeInsertHooks []MetabolismRefHook +var metabolismRefAfterInsertHooks []MetabolismRefHook + +var metabolismRefBeforeUpdateHooks []MetabolismRefHook +var metabolismRefAfterUpdateHooks []MetabolismRefHook + +var metabolismRefBeforeDeleteHooks []MetabolismRefHook +var metabolismRefAfterDeleteHooks []MetabolismRefHook + +var metabolismRefBeforeUpsertHooks []MetabolismRefHook +var metabolismRefAfterUpsertHooks []MetabolismRefHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *MetabolismRef) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismRefAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *MetabolismRef) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismRefBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *MetabolismRef) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismRefAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *MetabolismRef) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismRefBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *MetabolismRef) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismRefAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *MetabolismRef) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismRefBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *MetabolismRef) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismRefAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *MetabolismRef) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismRefBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *MetabolismRef) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range metabolismRefAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddMetabolismRefHook registers your hook function for all future operations. 
+func AddMetabolismRefHook(hookPoint boil.HookPoint, metabolismRefHook MetabolismRefHook) { + switch hookPoint { + case boil.AfterSelectHook: + metabolismRefAfterSelectHooks = append(metabolismRefAfterSelectHooks, metabolismRefHook) + case boil.BeforeInsertHook: + metabolismRefBeforeInsertHooks = append(metabolismRefBeforeInsertHooks, metabolismRefHook) + case boil.AfterInsertHook: + metabolismRefAfterInsertHooks = append(metabolismRefAfterInsertHooks, metabolismRefHook) + case boil.BeforeUpdateHook: + metabolismRefBeforeUpdateHooks = append(metabolismRefBeforeUpdateHooks, metabolismRefHook) + case boil.AfterUpdateHook: + metabolismRefAfterUpdateHooks = append(metabolismRefAfterUpdateHooks, metabolismRefHook) + case boil.BeforeDeleteHook: + metabolismRefBeforeDeleteHooks = append(metabolismRefBeforeDeleteHooks, metabolismRefHook) + case boil.AfterDeleteHook: + metabolismRefAfterDeleteHooks = append(metabolismRefAfterDeleteHooks, metabolismRefHook) + case boil.BeforeUpsertHook: + metabolismRefBeforeUpsertHooks = append(metabolismRefBeforeUpsertHooks, metabolismRefHook) + case boil.AfterUpsertHook: + metabolismRefAfterUpsertHooks = append(metabolismRefAfterUpsertHooks, metabolismRefHook) + } +} + +// One returns a single metabolismRef record from the query. +func (q metabolismRefQuery) One(ctx context.Context, exec boil.ContextExecutor) (*MetabolismRef, error) { + o := &MetabolismRef{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for metabolism_refs") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all MetabolismRef records from the query. 
+func (q metabolismRefQuery) All(ctx context.Context, exec boil.ContextExecutor) (MetabolismRefSlice, error) { + var o []*MetabolismRef + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to MetabolismRef slice") + } + + if len(metabolismRefAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all MetabolismRef records in the query. +func (q metabolismRefQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count metabolism_refs rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q metabolismRefQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if metabolism_refs exists") + } + + return count > 0, nil +} + +// Met pointed to by the foreign key. +func (o *MetabolismRef) Met(mods ...qm.QueryMod) metabolismQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"met_id\" = ?", o.MetID), + } + + queryMods = append(queryMods, mods...) + + return Metabolisms(queryMods...) +} + +// LoadMet allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (metabolismRefL) LoadMet(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMetabolismRef interface{}, mods queries.Applicator) error { + var slice []*MetabolismRef + var object *MetabolismRef + + if singular { + object = maybeMetabolismRef.(*MetabolismRef) + } else { + slice = *maybeMetabolismRef.(*[]*MetabolismRef) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &metabolismRefR{} + } + args = append(args, object.MetID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &metabolismRefR{} + } + + for _, a := range args { + if a == obj.MetID { + continue Outer + } + } + + args = append(args, obj.MetID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`metabolism`), + qm.WhereIn(`metabolism.met_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Metabolism") + } + + var resultSlice []*Metabolism + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Metabolism") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for metabolism") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for metabolism") + } + + if len(metabolismRefAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Met = foreign + if foreign.R == nil { + foreign.R = &metabolismR{} + } + foreign.R.MetMetabolismRefs = append(foreign.R.MetMetabolismRefs, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if 
local.MetID == foreign.MetID { + local.R.Met = foreign + if foreign.R == nil { + foreign.R = &metabolismR{} + } + foreign.R.MetMetabolismRefs = append(foreign.R.MetMetabolismRefs, local) + break + } + } + } + + return nil +} + +// SetMet of the metabolismRef to the related item. +// Sets o.R.Met to related. +// Adds o to related.R.MetMetabolismRefs. +func (o *MetabolismRef) SetMet(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Metabolism) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"metabolism_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"met_id"}), + strmangle.WhereClause("\"", "\"", 0, metabolismRefPrimaryKeyColumns), + ) + values := []interface{}{related.MetID, o.MetrefID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.MetID = related.MetID + if o.R == nil { + o.R = &metabolismRefR{ + Met: related, + } + } else { + o.R.Met = related + } + + if related.R == nil { + related.R = &metabolismR{ + MetMetabolismRefs: MetabolismRefSlice{o}, + } + } else { + related.R.MetMetabolismRefs = append(related.R.MetMetabolismRefs, o) + } + + return nil +} + +// MetabolismRefs retrieves all the records using an executor. +func MetabolismRefs(mods ...qm.QueryMod) metabolismRefQuery { + mods = append(mods, qm.From("\"metabolism_refs\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"metabolism_refs\".*"}) + } + + return metabolismRefQuery{q} +} + +// FindMetabolismRef retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindMetabolismRef(ctx context.Context, exec boil.ContextExecutor, metrefID int64, selectCols ...string) (*MetabolismRef, error) { + metabolismRefObj := &MetabolismRef{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"metabolism_refs\" where \"metref_id\"=?", sel, + ) + + q := queries.Raw(query, metrefID) + + err := q.Bind(ctx, exec, metabolismRefObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from metabolism_refs") + } + + if err = metabolismRefObj.doAfterSelectHooks(ctx, exec); err != nil { + return metabolismRefObj, err + } + + return metabolismRefObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *MetabolismRef) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no metabolism_refs provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(metabolismRefColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + metabolismRefInsertCacheMut.RLock() + cache, cached := metabolismRefInsertCache[key] + metabolismRefInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + metabolismRefAllColumns, + metabolismRefColumnsWithDefault, + metabolismRefColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(metabolismRefType, metabolismRefMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(metabolismRefType, metabolismRefMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = 
fmt.Sprintf("INSERT INTO \"metabolism_refs\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"metabolism_refs\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into metabolism_refs") + } + + if !cached { + metabolismRefInsertCacheMut.Lock() + metabolismRefInsertCache[key] = cache + metabolismRefInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the MetabolismRef. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *MetabolismRef) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + metabolismRefUpdateCacheMut.RLock() + cache, cached := metabolismRefUpdateCache[key] + metabolismRefUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + metabolismRefAllColumns, + metabolismRefPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update metabolism_refs, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"metabolism_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, metabolismRefPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(metabolismRefType, metabolismRefMapping, append(wl, metabolismRefPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update metabolism_refs row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for metabolism_refs") + } + + if !cached { + metabolismRefUpdateCacheMut.Lock() + metabolismRefUpdateCache[key] = cache + metabolismRefUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q metabolismRefQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for metabolism_refs") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for metabolism_refs") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o MetabolismRefSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), metabolismRefPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"metabolism_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, metabolismRefPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in metabolismRef slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all metabolismRef") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *MetabolismRef) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no metabolism_refs provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(metabolismRefColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + metabolismRefUpsertCacheMut.RLock() + cache, cached := metabolismRefUpsertCache[key] + metabolismRefUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + metabolismRefAllColumns, + metabolismRefColumnsWithDefault, + metabolismRefColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + metabolismRefAllColumns, + 
metabolismRefPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert metabolism_refs, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(metabolismRefPrimaryKeyColumns)) + copy(conflict, metabolismRefPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"metabolism_refs\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(metabolismRefType, metabolismRefMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(metabolismRefType, metabolismRefMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert metabolism_refs") + } + + if !cached { + metabolismRefUpsertCacheMut.Lock() + metabolismRefUpsertCache[key] = cache + metabolismRefUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single MetabolismRef record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *MetabolismRef) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no MetabolismRef provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), metabolismRefPrimaryKeyMapping) + sql := "DELETE FROM \"metabolism_refs\" WHERE \"metref_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from metabolism_refs") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for metabolism_refs") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q metabolismRefQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no metabolismRefQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from metabolism_refs") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for metabolism_refs") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o MetabolismRefSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(metabolismRefBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), metabolismRefPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"metabolism_refs\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, metabolismRefPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from metabolismRef slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for metabolism_refs") + } + + if len(metabolismRefAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *MetabolismRef) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindMetabolismRef(ctx, exec, o.MetrefID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *MetabolismRefSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := MetabolismRefSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), metabolismRefPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"metabolism_refs\".* FROM \"metabolism_refs\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, metabolismRefPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in MetabolismRefSlice") + } + + *o = slice + + return nil +} + +// MetabolismRefExists checks if the MetabolismRef row exists. +func MetabolismRefExists(ctx context.Context, exec boil.ContextExecutor, metrefID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"metabolism_refs\" where \"metref_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, metrefID) + } + row := exec.QueryRowContext(ctx, sql, metrefID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if metabolism_refs exists") + } + + return exists, nil +} diff --git a/models/molecule_atc_classification.go b/models/molecule_atc_classification.go new file mode 100644 index 0000000..cbda1b7 --- /dev/null +++ b/models/molecule_atc_classification.go @@ -0,0 +1,1241 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// MoleculeAtcClassification is an object representing the database table. +type MoleculeAtcClassification struct { + MolAtcID int64 `boil:"mol_atc_id" json:"mol_atc_id" toml:"mol_atc_id" yaml:"mol_atc_id"` + Level5 string `boil:"level5" json:"level5" toml:"level5" yaml:"level5"` + Molregno int64 `boil:"molregno" json:"molregno" toml:"molregno" yaml:"molregno"` + + R *moleculeAtcClassificationR `boil:"-" json:"-" toml:"-" yaml:"-"` + L moleculeAtcClassificationL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var MoleculeAtcClassificationColumns = struct { + MolAtcID string + Level5 string + Molregno string +}{ + MolAtcID: "mol_atc_id", + Level5: "level5", + Molregno: "molregno", +} + +var MoleculeAtcClassificationTableColumns = struct { + MolAtcID string + Level5 string + Molregno string +}{ + MolAtcID: "molecule_atc_classification.mol_atc_id", + Level5: "molecule_atc_classification.level5", + Molregno: "molecule_atc_classification.molregno", +} + +// Generated where + +var MoleculeAtcClassificationWhere = struct { + MolAtcID whereHelperint64 + Level5 whereHelperstring + Molregno whereHelperint64 +}{ + MolAtcID: whereHelperint64{field: "\"molecule_atc_classification\".\"mol_atc_id\""}, + Level5: whereHelperstring{field: "\"molecule_atc_classification\".\"level5\""}, + Molregno: whereHelperint64{field: "\"molecule_atc_classification\".\"molregno\""}, +} + +// MoleculeAtcClassificationRels is where relationship names are stored. 
+var MoleculeAtcClassificationRels = struct { + MolregnoMoleculeDictionary string + Level5AtcClassification string +}{ + MolregnoMoleculeDictionary: "MolregnoMoleculeDictionary", + Level5AtcClassification: "Level5AtcClassification", +} + +// moleculeAtcClassificationR is where relationships are stored. +type moleculeAtcClassificationR struct { + MolregnoMoleculeDictionary *MoleculeDictionary `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"` + Level5AtcClassification *AtcClassification `boil:"Level5AtcClassification" json:"Level5AtcClassification" toml:"Level5AtcClassification" yaml:"Level5AtcClassification"` +} + +// NewStruct creates a new relationship struct +func (*moleculeAtcClassificationR) NewStruct() *moleculeAtcClassificationR { + return &moleculeAtcClassificationR{} +} + +func (r *moleculeAtcClassificationR) GetMolregnoMoleculeDictionary() *MoleculeDictionary { + if r == nil { + return nil + } + return r.MolregnoMoleculeDictionary +} + +func (r *moleculeAtcClassificationR) GetLevel5AtcClassification() *AtcClassification { + if r == nil { + return nil + } + return r.Level5AtcClassification +} + +// moleculeAtcClassificationL is where Load methods for each relationship are stored. +type moleculeAtcClassificationL struct{} + +var ( + moleculeAtcClassificationAllColumns = []string{"mol_atc_id", "level5", "molregno"} + moleculeAtcClassificationColumnsWithoutDefault = []string{"mol_atc_id", "level5", "molregno"} + moleculeAtcClassificationColumnsWithDefault = []string{} + moleculeAtcClassificationPrimaryKeyColumns = []string{"mol_atc_id"} + moleculeAtcClassificationGeneratedColumns = []string{} +) + +type ( + // MoleculeAtcClassificationSlice is an alias for a slice of pointers to MoleculeAtcClassification. + // This should almost always be used instead of []MoleculeAtcClassification. 
+ MoleculeAtcClassificationSlice []*MoleculeAtcClassification + // MoleculeAtcClassificationHook is the signature for custom MoleculeAtcClassification hook methods + MoleculeAtcClassificationHook func(context.Context, boil.ContextExecutor, *MoleculeAtcClassification) error + + moleculeAtcClassificationQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + moleculeAtcClassificationType = reflect.TypeOf(&MoleculeAtcClassification{}) + moleculeAtcClassificationMapping = queries.MakeStructMapping(moleculeAtcClassificationType) + moleculeAtcClassificationPrimaryKeyMapping, _ = queries.BindMapping(moleculeAtcClassificationType, moleculeAtcClassificationMapping, moleculeAtcClassificationPrimaryKeyColumns) + moleculeAtcClassificationInsertCacheMut sync.RWMutex + moleculeAtcClassificationInsertCache = make(map[string]insertCache) + moleculeAtcClassificationUpdateCacheMut sync.RWMutex + moleculeAtcClassificationUpdateCache = make(map[string]updateCache) + moleculeAtcClassificationUpsertCacheMut sync.RWMutex + moleculeAtcClassificationUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var moleculeAtcClassificationAfterSelectHooks []MoleculeAtcClassificationHook + +var moleculeAtcClassificationBeforeInsertHooks []MoleculeAtcClassificationHook +var moleculeAtcClassificationAfterInsertHooks []MoleculeAtcClassificationHook + +var moleculeAtcClassificationBeforeUpdateHooks []MoleculeAtcClassificationHook +var moleculeAtcClassificationAfterUpdateHooks []MoleculeAtcClassificationHook + +var moleculeAtcClassificationBeforeDeleteHooks []MoleculeAtcClassificationHook +var moleculeAtcClassificationAfterDeleteHooks []MoleculeAtcClassificationHook + +var moleculeAtcClassificationBeforeUpsertHooks []MoleculeAtcClassificationHook +var moleculeAtcClassificationAfterUpsertHooks []MoleculeAtcClassificationHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *MoleculeAtcClassification) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeAtcClassificationAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *MoleculeAtcClassification) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeAtcClassificationBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. 
+func (o *MoleculeAtcClassification) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeAtcClassificationAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *MoleculeAtcClassification) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeAtcClassificationBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *MoleculeAtcClassification) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeAtcClassificationAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *MoleculeAtcClassification) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeAtcClassificationBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *MoleculeAtcClassification) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeAtcClassificationAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. 
+func (o *MoleculeAtcClassification) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeAtcClassificationBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *MoleculeAtcClassification) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeAtcClassificationAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddMoleculeAtcClassificationHook registers your hook function for all future operations. +func AddMoleculeAtcClassificationHook(hookPoint boil.HookPoint, moleculeAtcClassificationHook MoleculeAtcClassificationHook) { + switch hookPoint { + case boil.AfterSelectHook: + moleculeAtcClassificationAfterSelectHooks = append(moleculeAtcClassificationAfterSelectHooks, moleculeAtcClassificationHook) + case boil.BeforeInsertHook: + moleculeAtcClassificationBeforeInsertHooks = append(moleculeAtcClassificationBeforeInsertHooks, moleculeAtcClassificationHook) + case boil.AfterInsertHook: + moleculeAtcClassificationAfterInsertHooks = append(moleculeAtcClassificationAfterInsertHooks, moleculeAtcClassificationHook) + case boil.BeforeUpdateHook: + moleculeAtcClassificationBeforeUpdateHooks = append(moleculeAtcClassificationBeforeUpdateHooks, moleculeAtcClassificationHook) + case boil.AfterUpdateHook: + moleculeAtcClassificationAfterUpdateHooks = append(moleculeAtcClassificationAfterUpdateHooks, moleculeAtcClassificationHook) + case boil.BeforeDeleteHook: + moleculeAtcClassificationBeforeDeleteHooks = append(moleculeAtcClassificationBeforeDeleteHooks, moleculeAtcClassificationHook) + case boil.AfterDeleteHook: + moleculeAtcClassificationAfterDeleteHooks = 
append(moleculeAtcClassificationAfterDeleteHooks, moleculeAtcClassificationHook) + case boil.BeforeUpsertHook: + moleculeAtcClassificationBeforeUpsertHooks = append(moleculeAtcClassificationBeforeUpsertHooks, moleculeAtcClassificationHook) + case boil.AfterUpsertHook: + moleculeAtcClassificationAfterUpsertHooks = append(moleculeAtcClassificationAfterUpsertHooks, moleculeAtcClassificationHook) + } +} + +// One returns a single moleculeAtcClassification record from the query. +func (q moleculeAtcClassificationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*MoleculeAtcClassification, error) { + o := &MoleculeAtcClassification{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for molecule_atc_classification") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all MoleculeAtcClassification records from the query. +func (q moleculeAtcClassificationQuery) All(ctx context.Context, exec boil.ContextExecutor) (MoleculeAtcClassificationSlice, error) { + var o []*MoleculeAtcClassification + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to MoleculeAtcClassification slice") + } + + if len(moleculeAtcClassificationAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all MoleculeAtcClassification records in the query. 
+func (q moleculeAtcClassificationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count molecule_atc_classification rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q moleculeAtcClassificationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if molecule_atc_classification exists") + } + + return count > 0, nil +} + +// MolregnoMoleculeDictionary pointed to by the foreign key. +func (o *MoleculeAtcClassification) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"molregno\" = ?", o.Molregno), + } + + queryMods = append(queryMods, mods...) + + return MoleculeDictionaries(queryMods...) +} + +// Level5AtcClassification pointed to by the foreign key. +func (o *MoleculeAtcClassification) Level5AtcClassification(mods ...qm.QueryMod) atcClassificationQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"level5\" = ?", o.Level5), + } + + queryMods = append(queryMods, mods...) + + return AtcClassifications(queryMods...) +} + +// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (moleculeAtcClassificationL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeAtcClassification interface{}, mods queries.Applicator) error { + var slice []*MoleculeAtcClassification + var object *MoleculeAtcClassification + + if singular { + object = maybeMoleculeAtcClassification.(*MoleculeAtcClassification) + } else { + slice = *maybeMoleculeAtcClassification.(*[]*MoleculeAtcClassification) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &moleculeAtcClassificationR{} + } + args = append(args, object.Molregno) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &moleculeAtcClassificationR{} + } + + for _, a := range args { + if a == obj.Molregno { + continue Outer + } + } + + args = append(args, obj.Molregno) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`molecule_dictionary`), + qm.WhereIn(`molecule_dictionary.molregno in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load MoleculeDictionary") + } + + var resultSlice []*MoleculeDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary") + } + + if len(moleculeAtcClassificationAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + 
object.R.MolregnoMoleculeDictionary = foreign + if foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.MolregnoMoleculeAtcClassifications = append(foreign.R.MolregnoMoleculeAtcClassifications, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Molregno == foreign.Molregno { + local.R.MolregnoMoleculeDictionary = foreign + if foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.MolregnoMoleculeAtcClassifications = append(foreign.R.MolregnoMoleculeAtcClassifications, local) + break + } + } + } + + return nil +} + +// LoadLevel5AtcClassification allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (moleculeAtcClassificationL) LoadLevel5AtcClassification(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeAtcClassification interface{}, mods queries.Applicator) error { + var slice []*MoleculeAtcClassification + var object *MoleculeAtcClassification + + if singular { + object = maybeMoleculeAtcClassification.(*MoleculeAtcClassification) + } else { + slice = *maybeMoleculeAtcClassification.(*[]*MoleculeAtcClassification) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &moleculeAtcClassificationR{} + } + args = append(args, object.Level5) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &moleculeAtcClassificationR{} + } + + for _, a := range args { + if a == obj.Level5 { + continue Outer + } + } + + args = append(args, obj.Level5) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`atc_classification`), + qm.WhereIn(`atc_classification.level5 in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load AtcClassification") + } + + var resultSlice 
[]*AtcClassification + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice AtcClassification") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for atc_classification") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for atc_classification") + } + + if len(moleculeAtcClassificationAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Level5AtcClassification = foreign + if foreign.R == nil { + foreign.R = &atcClassificationR{} + } + foreign.R.Level5MoleculeAtcClassifications = append(foreign.R.Level5MoleculeAtcClassifications, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Level5 == foreign.Level5 { + local.R.Level5AtcClassification = foreign + if foreign.R == nil { + foreign.R = &atcClassificationR{} + } + foreign.R.Level5MoleculeAtcClassifications = append(foreign.R.Level5MoleculeAtcClassifications, local) + break + } + } + } + + return nil +} + +// SetMolregnoMoleculeDictionary of the moleculeAtcClassification to the related item. +// Sets o.R.MolregnoMoleculeDictionary to related. +// Adds o to related.R.MolregnoMoleculeAtcClassifications. 
+func (o *MoleculeAtcClassification) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_atc_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, moleculeAtcClassificationPrimaryKeyColumns), + ) + values := []interface{}{related.Molregno, o.MolAtcID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.Molregno = related.Molregno + if o.R == nil { + o.R = &moleculeAtcClassificationR{ + MolregnoMoleculeDictionary: related, + } + } else { + o.R.MolregnoMoleculeDictionary = related + } + + if related.R == nil { + related.R = &moleculeDictionaryR{ + MolregnoMoleculeAtcClassifications: MoleculeAtcClassificationSlice{o}, + } + } else { + related.R.MolregnoMoleculeAtcClassifications = append(related.R.MolregnoMoleculeAtcClassifications, o) + } + + return nil +} + +// SetLevel5AtcClassification of the moleculeAtcClassification to the related item. +// Sets o.R.Level5AtcClassification to related. +// Adds o to related.R.Level5MoleculeAtcClassifications. 
+func (o *MoleculeAtcClassification) SetLevel5AtcClassification(ctx context.Context, exec boil.ContextExecutor, insert bool, related *AtcClassification) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_atc_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"level5"}), + strmangle.WhereClause("\"", "\"", 0, moleculeAtcClassificationPrimaryKeyColumns), + ) + values := []interface{}{related.Level5, o.MolAtcID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.Level5 = related.Level5 + if o.R == nil { + o.R = &moleculeAtcClassificationR{ + Level5AtcClassification: related, + } + } else { + o.R.Level5AtcClassification = related + } + + if related.R == nil { + related.R = &atcClassificationR{ + Level5MoleculeAtcClassifications: MoleculeAtcClassificationSlice{o}, + } + } else { + related.R.Level5MoleculeAtcClassifications = append(related.R.Level5MoleculeAtcClassifications, o) + } + + return nil +} + +// MoleculeAtcClassifications retrieves all the records using an executor. +func MoleculeAtcClassifications(mods ...qm.QueryMod) moleculeAtcClassificationQuery { + mods = append(mods, qm.From("\"molecule_atc_classification\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"molecule_atc_classification\".*"}) + } + + return moleculeAtcClassificationQuery{q} +} + +// FindMoleculeAtcClassification retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindMoleculeAtcClassification(ctx context.Context, exec boil.ContextExecutor, molAtcID int64, selectCols ...string) (*MoleculeAtcClassification, error) { + moleculeAtcClassificationObj := &MoleculeAtcClassification{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"molecule_atc_classification\" where \"mol_atc_id\"=?", sel, + ) + + q := queries.Raw(query, molAtcID) + + err := q.Bind(ctx, exec, moleculeAtcClassificationObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from molecule_atc_classification") + } + + if err = moleculeAtcClassificationObj.doAfterSelectHooks(ctx, exec); err != nil { + return moleculeAtcClassificationObj, err + } + + return moleculeAtcClassificationObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *MoleculeAtcClassification) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no molecule_atc_classification provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(moleculeAtcClassificationColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + moleculeAtcClassificationInsertCacheMut.RLock() + cache, cached := moleculeAtcClassificationInsertCache[key] + moleculeAtcClassificationInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + moleculeAtcClassificationAllColumns, + moleculeAtcClassificationColumnsWithDefault, + moleculeAtcClassificationColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(moleculeAtcClassificationType, moleculeAtcClassificationMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(moleculeAtcClassificationType, moleculeAtcClassificationMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"molecule_atc_classification\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"molecule_atc_classification\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 
0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into molecule_atc_classification") + } + + if !cached { + moleculeAtcClassificationInsertCacheMut.Lock() + moleculeAtcClassificationInsertCache[key] = cache + moleculeAtcClassificationInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the MoleculeAtcClassification. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *MoleculeAtcClassification) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + moleculeAtcClassificationUpdateCacheMut.RLock() + cache, cached := moleculeAtcClassificationUpdateCache[key] + moleculeAtcClassificationUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + moleculeAtcClassificationAllColumns, + moleculeAtcClassificationPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update molecule_atc_classification, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"molecule_atc_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, moleculeAtcClassificationPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(moleculeAtcClassificationType, moleculeAtcClassificationMapping, append(wl, moleculeAtcClassificationPrimaryKeyColumns...)) + if err != 
nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update molecule_atc_classification row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for molecule_atc_classification") + } + + if !cached { + moleculeAtcClassificationUpdateCacheMut.Lock() + moleculeAtcClassificationUpdateCache[key] = cache + moleculeAtcClassificationUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q moleculeAtcClassificationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for molecule_atc_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for molecule_atc_classification") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. 
+func (o MoleculeAtcClassificationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeAtcClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"molecule_atc_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeAtcClassificationPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in moleculeAtcClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all moleculeAtcClassification") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. 
func (o *MoleculeAtcClassification) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no molecule_atc_classification provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	// Columns with a DB default that currently hold a non-zero Go value must be
	// inserted explicitly rather than left to the database default.
	nzDefaults := queries.NonZeroDefaultSet(moleculeAtcClassificationColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	// The key encodes every input that changes the generated SQL, so the
	// prepared query/mappings can be reused for identical calls.
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	moleculeAtcClassificationUpsertCacheMut.RLock()
	cache, cached := moleculeAtcClassificationUpsertCache[key]
	moleculeAtcClassificationUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		// Resolve the actual insert/update column sets from the caller's
		// boil.Columns selections; ret lists columns to scan back.
		insert, ret := insertColumns.InsertColumnSet(
			moleculeAtcClassificationAllColumns,
			moleculeAtcClassificationColumnsWithDefault,
			moleculeAtcClassificationColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			moleculeAtcClassificationAllColumns,
			moleculeAtcClassificationPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert molecule_atc_classification, could not build update column list")
		}

		// Default the conflict target to the primary key when none was given.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(moleculeAtcClassificationPrimaryKeyColumns))
			copy(conflict, moleculeAtcClassificationPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"molecule_atc_classification\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(moleculeAtcClassificationType, moleculeAtcClassificationMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(moleculeAtcClassificationType, moleculeAtcClassificationMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	// Only use QueryRow when there are returned columns to scan back into o.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert molecule_atc_classification")
	}

	// Publish the built query/mappings for future calls with the same key.
	if !cached {
		moleculeAtcClassificationUpsertCacheMut.Lock()
		moleculeAtcClassificationUpsertCache[key] = cache
		moleculeAtcClassificationUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single MoleculeAtcClassification record with an executor.
// Delete will match against the primary key column to find the record to delete.
+func (o *MoleculeAtcClassification) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no MoleculeAtcClassification provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), moleculeAtcClassificationPrimaryKeyMapping) + sql := "DELETE FROM \"molecule_atc_classification\" WHERE \"mol_atc_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from molecule_atc_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for molecule_atc_classification") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q moleculeAtcClassificationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no moleculeAtcClassificationQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from molecule_atc_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for molecule_atc_classification") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o MoleculeAtcClassificationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(moleculeAtcClassificationBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeAtcClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"molecule_atc_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeAtcClassificationPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from moleculeAtcClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for molecule_atc_classification") + } + + if len(moleculeAtcClassificationAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *MoleculeAtcClassification) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindMoleculeAtcClassification(ctx, exec, o.MolAtcID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *MoleculeAtcClassificationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := MoleculeAtcClassificationSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeAtcClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"molecule_atc_classification\".* FROM \"molecule_atc_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeAtcClassificationPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in MoleculeAtcClassificationSlice") + } + + *o = slice + + return nil +} + +// MoleculeAtcClassificationExists checks if the MoleculeAtcClassification row exists. +func MoleculeAtcClassificationExists(ctx context.Context, exec boil.ContextExecutor, molAtcID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"molecule_atc_classification\" where \"mol_atc_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, molAtcID) + } + row := exec.QueryRowContext(ctx, sql, molAtcID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if molecule_atc_classification exists") + } + + return exists, nil +} diff --git a/models/molecule_dictionary.go b/models/molecule_dictionary.go new file mode 100644 index 0000000..8f2a632 --- /dev/null +++ b/models/molecule_dictionary.go @@ -0,0 +1,4747 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// MoleculeDictionary is an object representing the database table. +type MoleculeDictionary struct { + Molregno int64 `boil:"molregno" json:"molregno" toml:"molregno" yaml:"molregno"` + PrefName null.String `boil:"pref_name" json:"pref_name,omitempty" toml:"pref_name" yaml:"pref_name,omitempty"` + ChemblID string `boil:"chembl_id" json:"chembl_id" toml:"chembl_id" yaml:"chembl_id"` + MaxPhase int16 `boil:"max_phase" json:"max_phase" toml:"max_phase" yaml:"max_phase"` + TherapeuticFlag int16 `boil:"therapeutic_flag" json:"therapeutic_flag" toml:"therapeutic_flag" yaml:"therapeutic_flag"` + DosedIngredient int16 `boil:"dosed_ingredient" json:"dosed_ingredient" toml:"dosed_ingredient" yaml:"dosed_ingredient"` + StructureType string `boil:"structure_type" json:"structure_type" toml:"structure_type" yaml:"structure_type"` + ChebiParID null.Int64 `boil:"chebi_par_id" json:"chebi_par_id,omitempty" toml:"chebi_par_id" yaml:"chebi_par_id,omitempty"` + MoleculeType null.String `boil:"molecule_type" json:"molecule_type,omitempty" toml:"molecule_type" yaml:"molecule_type,omitempty"` + FirstApproval null.Int64 `boil:"first_approval" json:"first_approval,omitempty" toml:"first_approval" yaml:"first_approval,omitempty"` + Oral int16 `boil:"oral" json:"oral" toml:"oral" yaml:"oral"` + Parenteral int16 `boil:"parenteral" json:"parenteral" toml:"parenteral" yaml:"parenteral"` + Topical int16 `boil:"topical" json:"topical" toml:"topical" yaml:"topical"` + BlackBoxWarning int16 `boil:"black_box_warning" json:"black_box_warning" 
toml:"black_box_warning" yaml:"black_box_warning"` + NaturalProduct int16 `boil:"natural_product" json:"natural_product" toml:"natural_product" yaml:"natural_product"` + FirstInClass int16 `boil:"first_in_class" json:"first_in_class" toml:"first_in_class" yaml:"first_in_class"` + Chirality int16 `boil:"chirality" json:"chirality" toml:"chirality" yaml:"chirality"` + Prodrug int16 `boil:"prodrug" json:"prodrug" toml:"prodrug" yaml:"prodrug"` + InorganicFlag int16 `boil:"inorganic_flag" json:"inorganic_flag" toml:"inorganic_flag" yaml:"inorganic_flag"` + UsanYear null.Int64 `boil:"usan_year" json:"usan_year,omitempty" toml:"usan_year" yaml:"usan_year,omitempty"` + AvailabilityType null.Int16 `boil:"availability_type" json:"availability_type,omitempty" toml:"availability_type" yaml:"availability_type,omitempty"` + UsanStem null.String `boil:"usan_stem" json:"usan_stem,omitempty" toml:"usan_stem" yaml:"usan_stem,omitempty"` + PolymerFlag null.Int16 `boil:"polymer_flag" json:"polymer_flag,omitempty" toml:"polymer_flag" yaml:"polymer_flag,omitempty"` + UsanSubstem null.String `boil:"usan_substem" json:"usan_substem,omitempty" toml:"usan_substem" yaml:"usan_substem,omitempty"` + UsanStemDefinition null.String `boil:"usan_stem_definition" json:"usan_stem_definition,omitempty" toml:"usan_stem_definition" yaml:"usan_stem_definition,omitempty"` + IndicationClass null.String `boil:"indication_class" json:"indication_class,omitempty" toml:"indication_class" yaml:"indication_class,omitempty"` + WithdrawnFlag int16 `boil:"withdrawn_flag" json:"withdrawn_flag" toml:"withdrawn_flag" yaml:"withdrawn_flag"` + WithdrawnYear null.Int64 `boil:"withdrawn_year" json:"withdrawn_year,omitempty" toml:"withdrawn_year" yaml:"withdrawn_year,omitempty"` + WithdrawnCountry null.String `boil:"withdrawn_country" json:"withdrawn_country,omitempty" toml:"withdrawn_country" yaml:"withdrawn_country,omitempty"` + WithdrawnReason null.String `boil:"withdrawn_reason" json:"withdrawn_reason,omitempty" 
toml:"withdrawn_reason" yaml:"withdrawn_reason,omitempty"` + WithdrawnClass null.String `boil:"withdrawn_class" json:"withdrawn_class,omitempty" toml:"withdrawn_class" yaml:"withdrawn_class,omitempty"` + + R *moleculeDictionaryR `boil:"-" json:"-" toml:"-" yaml:"-"` + L moleculeDictionaryL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var MoleculeDictionaryColumns = struct { + Molregno string + PrefName string + ChemblID string + MaxPhase string + TherapeuticFlag string + DosedIngredient string + StructureType string + ChebiParID string + MoleculeType string + FirstApproval string + Oral string + Parenteral string + Topical string + BlackBoxWarning string + NaturalProduct string + FirstInClass string + Chirality string + Prodrug string + InorganicFlag string + UsanYear string + AvailabilityType string + UsanStem string + PolymerFlag string + UsanSubstem string + UsanStemDefinition string + IndicationClass string + WithdrawnFlag string + WithdrawnYear string + WithdrawnCountry string + WithdrawnReason string + WithdrawnClass string +}{ + Molregno: "molregno", + PrefName: "pref_name", + ChemblID: "chembl_id", + MaxPhase: "max_phase", + TherapeuticFlag: "therapeutic_flag", + DosedIngredient: "dosed_ingredient", + StructureType: "structure_type", + ChebiParID: "chebi_par_id", + MoleculeType: "molecule_type", + FirstApproval: "first_approval", + Oral: "oral", + Parenteral: "parenteral", + Topical: "topical", + BlackBoxWarning: "black_box_warning", + NaturalProduct: "natural_product", + FirstInClass: "first_in_class", + Chirality: "chirality", + Prodrug: "prodrug", + InorganicFlag: "inorganic_flag", + UsanYear: "usan_year", + AvailabilityType: "availability_type", + UsanStem: "usan_stem", + PolymerFlag: "polymer_flag", + UsanSubstem: "usan_substem", + UsanStemDefinition: "usan_stem_definition", + IndicationClass: "indication_class", + WithdrawnFlag: "withdrawn_flag", + WithdrawnYear: "withdrawn_year", + WithdrawnCountry: "withdrawn_country", + WithdrawnReason: 
"withdrawn_reason", + WithdrawnClass: "withdrawn_class", +} + +var MoleculeDictionaryTableColumns = struct { + Molregno string + PrefName string + ChemblID string + MaxPhase string + TherapeuticFlag string + DosedIngredient string + StructureType string + ChebiParID string + MoleculeType string + FirstApproval string + Oral string + Parenteral string + Topical string + BlackBoxWarning string + NaturalProduct string + FirstInClass string + Chirality string + Prodrug string + InorganicFlag string + UsanYear string + AvailabilityType string + UsanStem string + PolymerFlag string + UsanSubstem string + UsanStemDefinition string + IndicationClass string + WithdrawnFlag string + WithdrawnYear string + WithdrawnCountry string + WithdrawnReason string + WithdrawnClass string +}{ + Molregno: "molecule_dictionary.molregno", + PrefName: "molecule_dictionary.pref_name", + ChemblID: "molecule_dictionary.chembl_id", + MaxPhase: "molecule_dictionary.max_phase", + TherapeuticFlag: "molecule_dictionary.therapeutic_flag", + DosedIngredient: "molecule_dictionary.dosed_ingredient", + StructureType: "molecule_dictionary.structure_type", + ChebiParID: "molecule_dictionary.chebi_par_id", + MoleculeType: "molecule_dictionary.molecule_type", + FirstApproval: "molecule_dictionary.first_approval", + Oral: "molecule_dictionary.oral", + Parenteral: "molecule_dictionary.parenteral", + Topical: "molecule_dictionary.topical", + BlackBoxWarning: "molecule_dictionary.black_box_warning", + NaturalProduct: "molecule_dictionary.natural_product", + FirstInClass: "molecule_dictionary.first_in_class", + Chirality: "molecule_dictionary.chirality", + Prodrug: "molecule_dictionary.prodrug", + InorganicFlag: "molecule_dictionary.inorganic_flag", + UsanYear: "molecule_dictionary.usan_year", + AvailabilityType: "molecule_dictionary.availability_type", + UsanStem: "molecule_dictionary.usan_stem", + PolymerFlag: "molecule_dictionary.polymer_flag", + UsanSubstem: "molecule_dictionary.usan_substem", + 
UsanStemDefinition: "molecule_dictionary.usan_stem_definition", + IndicationClass: "molecule_dictionary.indication_class", + WithdrawnFlag: "molecule_dictionary.withdrawn_flag", + WithdrawnYear: "molecule_dictionary.withdrawn_year", + WithdrawnCountry: "molecule_dictionary.withdrawn_country", + WithdrawnReason: "molecule_dictionary.withdrawn_reason", + WithdrawnClass: "molecule_dictionary.withdrawn_class", +} + +// Generated where + +var MoleculeDictionaryWhere = struct { + Molregno whereHelperint64 + PrefName whereHelpernull_String + ChemblID whereHelperstring + MaxPhase whereHelperint16 + TherapeuticFlag whereHelperint16 + DosedIngredient whereHelperint16 + StructureType whereHelperstring + ChebiParID whereHelpernull_Int64 + MoleculeType whereHelpernull_String + FirstApproval whereHelpernull_Int64 + Oral whereHelperint16 + Parenteral whereHelperint16 + Topical whereHelperint16 + BlackBoxWarning whereHelperint16 + NaturalProduct whereHelperint16 + FirstInClass whereHelperint16 + Chirality whereHelperint16 + Prodrug whereHelperint16 + InorganicFlag whereHelperint16 + UsanYear whereHelpernull_Int64 + AvailabilityType whereHelpernull_Int16 + UsanStem whereHelpernull_String + PolymerFlag whereHelpernull_Int16 + UsanSubstem whereHelpernull_String + UsanStemDefinition whereHelpernull_String + IndicationClass whereHelpernull_String + WithdrawnFlag whereHelperint16 + WithdrawnYear whereHelpernull_Int64 + WithdrawnCountry whereHelpernull_String + WithdrawnReason whereHelpernull_String + WithdrawnClass whereHelpernull_String +}{ + Molregno: whereHelperint64{field: "\"molecule_dictionary\".\"molregno\""}, + PrefName: whereHelpernull_String{field: "\"molecule_dictionary\".\"pref_name\""}, + ChemblID: whereHelperstring{field: "\"molecule_dictionary\".\"chembl_id\""}, + MaxPhase: whereHelperint16{field: "\"molecule_dictionary\".\"max_phase\""}, + TherapeuticFlag: whereHelperint16{field: "\"molecule_dictionary\".\"therapeutic_flag\""}, + DosedIngredient: whereHelperint16{field: 
"\"molecule_dictionary\".\"dosed_ingredient\""}, + StructureType: whereHelperstring{field: "\"molecule_dictionary\".\"structure_type\""}, + ChebiParID: whereHelpernull_Int64{field: "\"molecule_dictionary\".\"chebi_par_id\""}, + MoleculeType: whereHelpernull_String{field: "\"molecule_dictionary\".\"molecule_type\""}, + FirstApproval: whereHelpernull_Int64{field: "\"molecule_dictionary\".\"first_approval\""}, + Oral: whereHelperint16{field: "\"molecule_dictionary\".\"oral\""}, + Parenteral: whereHelperint16{field: "\"molecule_dictionary\".\"parenteral\""}, + Topical: whereHelperint16{field: "\"molecule_dictionary\".\"topical\""}, + BlackBoxWarning: whereHelperint16{field: "\"molecule_dictionary\".\"black_box_warning\""}, + NaturalProduct: whereHelperint16{field: "\"molecule_dictionary\".\"natural_product\""}, + FirstInClass: whereHelperint16{field: "\"molecule_dictionary\".\"first_in_class\""}, + Chirality: whereHelperint16{field: "\"molecule_dictionary\".\"chirality\""}, + Prodrug: whereHelperint16{field: "\"molecule_dictionary\".\"prodrug\""}, + InorganicFlag: whereHelperint16{field: "\"molecule_dictionary\".\"inorganic_flag\""}, + UsanYear: whereHelpernull_Int64{field: "\"molecule_dictionary\".\"usan_year\""}, + AvailabilityType: whereHelpernull_Int16{field: "\"molecule_dictionary\".\"availability_type\""}, + UsanStem: whereHelpernull_String{field: "\"molecule_dictionary\".\"usan_stem\""}, + PolymerFlag: whereHelpernull_Int16{field: "\"molecule_dictionary\".\"polymer_flag\""}, + UsanSubstem: whereHelpernull_String{field: "\"molecule_dictionary\".\"usan_substem\""}, + UsanStemDefinition: whereHelpernull_String{field: "\"molecule_dictionary\".\"usan_stem_definition\""}, + IndicationClass: whereHelpernull_String{field: "\"molecule_dictionary\".\"indication_class\""}, + WithdrawnFlag: whereHelperint16{field: "\"molecule_dictionary\".\"withdrawn_flag\""}, + WithdrawnYear: whereHelpernull_Int64{field: "\"molecule_dictionary\".\"withdrawn_year\""}, + WithdrawnCountry: 
whereHelpernull_String{field: "\"molecule_dictionary\".\"withdrawn_country\""}, + WithdrawnReason: whereHelpernull_String{field: "\"molecule_dictionary\".\"withdrawn_reason\""}, + WithdrawnClass: whereHelpernull_String{field: "\"molecule_dictionary\".\"withdrawn_class\""}, +} + +// MoleculeDictionaryRels is where relationship names are stored. +var MoleculeDictionaryRels = struct { + Chembl string + MolregnoBiotherapeutic string + MolregnoCompoundProperty string + MolregnoCompoundStructure string + MolregnoMoleculeHierarchy string + MolregnoActivities string + MolregnoCompoundRecords string + MolregnoCompoundStructuralAlerts string + MolregnoDrugIndications string + MolregnoDrugMechanisms string + MolregnoFormulations string + MolregnoMoleculeAtcClassifications string + MolregnoMoleculeFracClassifications string + ParentMolregnoMoleculeHierarchies string + ActiveMolregnoMoleculeHierarchies string + MolregnoMoleculeHracClassifications string + MolregnoMoleculeIracClassifications string + MolregnoMoleculeSynonyms string +}{ + Chembl: "Chembl", + MolregnoBiotherapeutic: "MolregnoBiotherapeutic", + MolregnoCompoundProperty: "MolregnoCompoundProperty", + MolregnoCompoundStructure: "MolregnoCompoundStructure", + MolregnoMoleculeHierarchy: "MolregnoMoleculeHierarchy", + MolregnoActivities: "MolregnoActivities", + MolregnoCompoundRecords: "MolregnoCompoundRecords", + MolregnoCompoundStructuralAlerts: "MolregnoCompoundStructuralAlerts", + MolregnoDrugIndications: "MolregnoDrugIndications", + MolregnoDrugMechanisms: "MolregnoDrugMechanisms", + MolregnoFormulations: "MolregnoFormulations", + MolregnoMoleculeAtcClassifications: "MolregnoMoleculeAtcClassifications", + MolregnoMoleculeFracClassifications: "MolregnoMoleculeFracClassifications", + ParentMolregnoMoleculeHierarchies: "ParentMolregnoMoleculeHierarchies", + ActiveMolregnoMoleculeHierarchies: "ActiveMolregnoMoleculeHierarchies", + MolregnoMoleculeHracClassifications: "MolregnoMoleculeHracClassifications", + 
MolregnoMoleculeIracClassifications: "MolregnoMoleculeIracClassifications", + MolregnoMoleculeSynonyms: "MolregnoMoleculeSynonyms", +} + +// moleculeDictionaryR is where relationships are stored. +type moleculeDictionaryR struct { + Chembl *ChemblIDLookup `boil:"Chembl" json:"Chembl" toml:"Chembl" yaml:"Chembl"` + MolregnoBiotherapeutic *Biotherapeutic `boil:"MolregnoBiotherapeutic" json:"MolregnoBiotherapeutic" toml:"MolregnoBiotherapeutic" yaml:"MolregnoBiotherapeutic"` + MolregnoCompoundProperty *CompoundProperty `boil:"MolregnoCompoundProperty" json:"MolregnoCompoundProperty" toml:"MolregnoCompoundProperty" yaml:"MolregnoCompoundProperty"` + MolregnoCompoundStructure *CompoundStructure `boil:"MolregnoCompoundStructure" json:"MolregnoCompoundStructure" toml:"MolregnoCompoundStructure" yaml:"MolregnoCompoundStructure"` + MolregnoMoleculeHierarchy *MoleculeHierarchy `boil:"MolregnoMoleculeHierarchy" json:"MolregnoMoleculeHierarchy" toml:"MolregnoMoleculeHierarchy" yaml:"MolregnoMoleculeHierarchy"` + MolregnoActivities ActivitySlice `boil:"MolregnoActivities" json:"MolregnoActivities" toml:"MolregnoActivities" yaml:"MolregnoActivities"` + MolregnoCompoundRecords CompoundRecordSlice `boil:"MolregnoCompoundRecords" json:"MolregnoCompoundRecords" toml:"MolregnoCompoundRecords" yaml:"MolregnoCompoundRecords"` + MolregnoCompoundStructuralAlerts CompoundStructuralAlertSlice `boil:"MolregnoCompoundStructuralAlerts" json:"MolregnoCompoundStructuralAlerts" toml:"MolregnoCompoundStructuralAlerts" yaml:"MolregnoCompoundStructuralAlerts"` + MolregnoDrugIndications DrugIndicationSlice `boil:"MolregnoDrugIndications" json:"MolregnoDrugIndications" toml:"MolregnoDrugIndications" yaml:"MolregnoDrugIndications"` + MolregnoDrugMechanisms DrugMechanismSlice `boil:"MolregnoDrugMechanisms" json:"MolregnoDrugMechanisms" toml:"MolregnoDrugMechanisms" yaml:"MolregnoDrugMechanisms"` + MolregnoFormulations FormulationSlice `boil:"MolregnoFormulations" json:"MolregnoFormulations" 
toml:"MolregnoFormulations" yaml:"MolregnoFormulations"` + MolregnoMoleculeAtcClassifications MoleculeAtcClassificationSlice `boil:"MolregnoMoleculeAtcClassifications" json:"MolregnoMoleculeAtcClassifications" toml:"MolregnoMoleculeAtcClassifications" yaml:"MolregnoMoleculeAtcClassifications"` + MolregnoMoleculeFracClassifications MoleculeFracClassificationSlice `boil:"MolregnoMoleculeFracClassifications" json:"MolregnoMoleculeFracClassifications" toml:"MolregnoMoleculeFracClassifications" yaml:"MolregnoMoleculeFracClassifications"` + ParentMolregnoMoleculeHierarchies MoleculeHierarchySlice `boil:"ParentMolregnoMoleculeHierarchies" json:"ParentMolregnoMoleculeHierarchies" toml:"ParentMolregnoMoleculeHierarchies" yaml:"ParentMolregnoMoleculeHierarchies"` + ActiveMolregnoMoleculeHierarchies MoleculeHierarchySlice `boil:"ActiveMolregnoMoleculeHierarchies" json:"ActiveMolregnoMoleculeHierarchies" toml:"ActiveMolregnoMoleculeHierarchies" yaml:"ActiveMolregnoMoleculeHierarchies"` + MolregnoMoleculeHracClassifications MoleculeHracClassificationSlice `boil:"MolregnoMoleculeHracClassifications" json:"MolregnoMoleculeHracClassifications" toml:"MolregnoMoleculeHracClassifications" yaml:"MolregnoMoleculeHracClassifications"` + MolregnoMoleculeIracClassifications MoleculeIracClassificationSlice `boil:"MolregnoMoleculeIracClassifications" json:"MolregnoMoleculeIracClassifications" toml:"MolregnoMoleculeIracClassifications" yaml:"MolregnoMoleculeIracClassifications"` + MolregnoMoleculeSynonyms MoleculeSynonymSlice `boil:"MolregnoMoleculeSynonyms" json:"MolregnoMoleculeSynonyms" toml:"MolregnoMoleculeSynonyms" yaml:"MolregnoMoleculeSynonyms"` +} + +// NewStruct creates a new relationship struct +func (*moleculeDictionaryR) NewStruct() *moleculeDictionaryR { + return &moleculeDictionaryR{} +} + +func (r *moleculeDictionaryR) GetChembl() *ChemblIDLookup { + if r == nil { + return nil + } + return r.Chembl +} + +func (r *moleculeDictionaryR) GetMolregnoBiotherapeutic() 
*Biotherapeutic { + if r == nil { + return nil + } + return r.MolregnoBiotherapeutic +} + +func (r *moleculeDictionaryR) GetMolregnoCompoundProperty() *CompoundProperty { + if r == nil { + return nil + } + return r.MolregnoCompoundProperty +} + +func (r *moleculeDictionaryR) GetMolregnoCompoundStructure() *CompoundStructure { + if r == nil { + return nil + } + return r.MolregnoCompoundStructure +} + +func (r *moleculeDictionaryR) GetMolregnoMoleculeHierarchy() *MoleculeHierarchy { + if r == nil { + return nil + } + return r.MolregnoMoleculeHierarchy +} + +func (r *moleculeDictionaryR) GetMolregnoActivities() ActivitySlice { + if r == nil { + return nil + } + return r.MolregnoActivities +} + +func (r *moleculeDictionaryR) GetMolregnoCompoundRecords() CompoundRecordSlice { + if r == nil { + return nil + } + return r.MolregnoCompoundRecords +} + +func (r *moleculeDictionaryR) GetMolregnoCompoundStructuralAlerts() CompoundStructuralAlertSlice { + if r == nil { + return nil + } + return r.MolregnoCompoundStructuralAlerts +} + +func (r *moleculeDictionaryR) GetMolregnoDrugIndications() DrugIndicationSlice { + if r == nil { + return nil + } + return r.MolregnoDrugIndications +} + +func (r *moleculeDictionaryR) GetMolregnoDrugMechanisms() DrugMechanismSlice { + if r == nil { + return nil + } + return r.MolregnoDrugMechanisms +} + +func (r *moleculeDictionaryR) GetMolregnoFormulations() FormulationSlice { + if r == nil { + return nil + } + return r.MolregnoFormulations +} + +func (r *moleculeDictionaryR) GetMolregnoMoleculeAtcClassifications() MoleculeAtcClassificationSlice { + if r == nil { + return nil + } + return r.MolregnoMoleculeAtcClassifications +} + +func (r *moleculeDictionaryR) GetMolregnoMoleculeFracClassifications() MoleculeFracClassificationSlice { + if r == nil { + return nil + } + return r.MolregnoMoleculeFracClassifications +} + +func (r *moleculeDictionaryR) GetParentMolregnoMoleculeHierarchies() MoleculeHierarchySlice { + if r == nil { + return nil + } + 
return r.ParentMolregnoMoleculeHierarchies +} + +func (r *moleculeDictionaryR) GetActiveMolregnoMoleculeHierarchies() MoleculeHierarchySlice { + if r == nil { + return nil + } + return r.ActiveMolregnoMoleculeHierarchies +} + +func (r *moleculeDictionaryR) GetMolregnoMoleculeHracClassifications() MoleculeHracClassificationSlice { + if r == nil { + return nil + } + return r.MolregnoMoleculeHracClassifications +} + +func (r *moleculeDictionaryR) GetMolregnoMoleculeIracClassifications() MoleculeIracClassificationSlice { + if r == nil { + return nil + } + return r.MolregnoMoleculeIracClassifications +} + +func (r *moleculeDictionaryR) GetMolregnoMoleculeSynonyms() MoleculeSynonymSlice { + if r == nil { + return nil + } + return r.MolregnoMoleculeSynonyms +} + +// moleculeDictionaryL is where Load methods for each relationship are stored. +type moleculeDictionaryL struct{} + +var ( + moleculeDictionaryAllColumns = []string{"molregno", "pref_name", "chembl_id", "max_phase", "therapeutic_flag", "dosed_ingredient", "structure_type", "chebi_par_id", "molecule_type", "first_approval", "oral", "parenteral", "topical", "black_box_warning", "natural_product", "first_in_class", "chirality", "prodrug", "inorganic_flag", "usan_year", "availability_type", "usan_stem", "polymer_flag", "usan_substem", "usan_stem_definition", "indication_class", "withdrawn_flag", "withdrawn_year", "withdrawn_country", "withdrawn_reason", "withdrawn_class"} + moleculeDictionaryColumnsWithoutDefault = []string{"molregno", "chembl_id", "max_phase", "therapeutic_flag", "dosed_ingredient", "structure_type", "oral", "parenteral", "topical", "black_box_warning", "natural_product", "first_in_class", "chirality", "prodrug", "inorganic_flag", "withdrawn_flag"} + moleculeDictionaryColumnsWithDefault = []string{"pref_name", "chebi_par_id", "molecule_type", "first_approval", "usan_year", "availability_type", "usan_stem", "polymer_flag", "usan_substem", "usan_stem_definition", "indication_class", "withdrawn_year", 
"withdrawn_country", "withdrawn_reason", "withdrawn_class"} + moleculeDictionaryPrimaryKeyColumns = []string{"molregno"} + moleculeDictionaryGeneratedColumns = []string{} +) + +type ( + // MoleculeDictionarySlice is an alias for a slice of pointers to MoleculeDictionary. + // This should almost always be used instead of []MoleculeDictionary. + MoleculeDictionarySlice []*MoleculeDictionary + // MoleculeDictionaryHook is the signature for custom MoleculeDictionary hook methods + MoleculeDictionaryHook func(context.Context, boil.ContextExecutor, *MoleculeDictionary) error + + moleculeDictionaryQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + moleculeDictionaryType = reflect.TypeOf(&MoleculeDictionary{}) + moleculeDictionaryMapping = queries.MakeStructMapping(moleculeDictionaryType) + moleculeDictionaryPrimaryKeyMapping, _ = queries.BindMapping(moleculeDictionaryType, moleculeDictionaryMapping, moleculeDictionaryPrimaryKeyColumns) + moleculeDictionaryInsertCacheMut sync.RWMutex + moleculeDictionaryInsertCache = make(map[string]insertCache) + moleculeDictionaryUpdateCacheMut sync.RWMutex + moleculeDictionaryUpdateCache = make(map[string]updateCache) + moleculeDictionaryUpsertCacheMut sync.RWMutex + moleculeDictionaryUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var moleculeDictionaryAfterSelectHooks []MoleculeDictionaryHook + +var moleculeDictionaryBeforeInsertHooks []MoleculeDictionaryHook +var moleculeDictionaryAfterInsertHooks []MoleculeDictionaryHook + +var moleculeDictionaryBeforeUpdateHooks []MoleculeDictionaryHook +var moleculeDictionaryAfterUpdateHooks []MoleculeDictionaryHook + +var moleculeDictionaryBeforeDeleteHooks []MoleculeDictionaryHook +var moleculeDictionaryAfterDeleteHooks []MoleculeDictionaryHook + +var moleculeDictionaryBeforeUpsertHooks []MoleculeDictionaryHook +var moleculeDictionaryAfterUpsertHooks []MoleculeDictionaryHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *MoleculeDictionary) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeDictionaryAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *MoleculeDictionary) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeDictionaryBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *MoleculeDictionary) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeDictionaryAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *MoleculeDictionary) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeDictionaryBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *MoleculeDictionary) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeDictionaryAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *MoleculeDictionary) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeDictionaryBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *MoleculeDictionary) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeDictionaryAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *MoleculeDictionary) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeDictionaryBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *MoleculeDictionary) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeDictionaryAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddMoleculeDictionaryHook registers your hook function for all future operations. +func AddMoleculeDictionaryHook(hookPoint boil.HookPoint, moleculeDictionaryHook MoleculeDictionaryHook) { + switch hookPoint { + case boil.AfterSelectHook: + moleculeDictionaryAfterSelectHooks = append(moleculeDictionaryAfterSelectHooks, moleculeDictionaryHook) + case boil.BeforeInsertHook: + moleculeDictionaryBeforeInsertHooks = append(moleculeDictionaryBeforeInsertHooks, moleculeDictionaryHook) + case boil.AfterInsertHook: + moleculeDictionaryAfterInsertHooks = append(moleculeDictionaryAfterInsertHooks, moleculeDictionaryHook) + case boil.BeforeUpdateHook: + moleculeDictionaryBeforeUpdateHooks = append(moleculeDictionaryBeforeUpdateHooks, moleculeDictionaryHook) + case boil.AfterUpdateHook: + moleculeDictionaryAfterUpdateHooks = append(moleculeDictionaryAfterUpdateHooks, moleculeDictionaryHook) + case boil.BeforeDeleteHook: + moleculeDictionaryBeforeDeleteHooks = append(moleculeDictionaryBeforeDeleteHooks, moleculeDictionaryHook) + case boil.AfterDeleteHook: + moleculeDictionaryAfterDeleteHooks = append(moleculeDictionaryAfterDeleteHooks, moleculeDictionaryHook) + case boil.BeforeUpsertHook: + moleculeDictionaryBeforeUpsertHooks = append(moleculeDictionaryBeforeUpsertHooks, moleculeDictionaryHook) + case boil.AfterUpsertHook: + moleculeDictionaryAfterUpsertHooks = append(moleculeDictionaryAfterUpsertHooks, moleculeDictionaryHook) + } +} + +// One returns a single moleculeDictionary record from the query. 
+func (q moleculeDictionaryQuery) One(ctx context.Context, exec boil.ContextExecutor) (*MoleculeDictionary, error) { + o := &MoleculeDictionary{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for molecule_dictionary") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all MoleculeDictionary records from the query. +func (q moleculeDictionaryQuery) All(ctx context.Context, exec boil.ContextExecutor) (MoleculeDictionarySlice, error) { + var o []*MoleculeDictionary + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to MoleculeDictionary slice") + } + + if len(moleculeDictionaryAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all MoleculeDictionary records in the query. +func (q moleculeDictionaryQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count molecule_dictionary rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. 
+func (q moleculeDictionaryQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if molecule_dictionary exists") + } + + return count > 0, nil +} + +// Chembl pointed to by the foreign key. +func (o *MoleculeDictionary) Chembl(mods ...qm.QueryMod) chemblIDLookupQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"chembl_id\" = ?", o.ChemblID), + } + + queryMods = append(queryMods, mods...) + + return ChemblIDLookups(queryMods...) +} + +// MolregnoBiotherapeutic pointed to by the foreign key. +func (o *MoleculeDictionary) MolregnoBiotherapeutic(mods ...qm.QueryMod) biotherapeuticQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"molregno\" = ?", o.Molregno), + } + + queryMods = append(queryMods, mods...) + + return Biotherapeutics(queryMods...) +} + +// MolregnoCompoundProperty pointed to by the foreign key. +func (o *MoleculeDictionary) MolregnoCompoundProperty(mods ...qm.QueryMod) compoundPropertyQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"molregno\" = ?", o.Molregno), + } + + queryMods = append(queryMods, mods...) + + return CompoundProperties(queryMods...) +} + +// MolregnoCompoundStructure pointed to by the foreign key. +func (o *MoleculeDictionary) MolregnoCompoundStructure(mods ...qm.QueryMod) compoundStructureQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"molregno\" = ?", o.Molregno), + } + + queryMods = append(queryMods, mods...) + + return CompoundStructures(queryMods...) +} + +// MolregnoMoleculeHierarchy pointed to by the foreign key. +func (o *MoleculeDictionary) MolregnoMoleculeHierarchy(mods ...qm.QueryMod) moleculeHierarchyQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"molregno\" = ?", o.Molregno), + } + + queryMods = append(queryMods, mods...) 
+ + return MoleculeHierarchies(queryMods...) +} + +// MolregnoActivities retrieves all the activity's Activities with an executor via molregno column. +func (o *MoleculeDictionary) MolregnoActivities(mods ...qm.QueryMod) activityQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"activities\".\"molregno\"=?", o.Molregno), + ) + + return Activities(queryMods...) +} + +// MolregnoCompoundRecords retrieves all the compound_record's CompoundRecords with an executor via molregno column. +func (o *MoleculeDictionary) MolregnoCompoundRecords(mods ...qm.QueryMod) compoundRecordQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"compound_records\".\"molregno\"=?", o.Molregno), + ) + + return CompoundRecords(queryMods...) +} + +// MolregnoCompoundStructuralAlerts retrieves all the compound_structural_alert's CompoundStructuralAlerts with an executor via molregno column. +func (o *MoleculeDictionary) MolregnoCompoundStructuralAlerts(mods ...qm.QueryMod) compoundStructuralAlertQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"compound_structural_alerts\".\"molregno\"=?", o.Molregno), + ) + + return CompoundStructuralAlerts(queryMods...) +} + +// MolregnoDrugIndications retrieves all the drug_indication's DrugIndications with an executor via molregno column. +func (o *MoleculeDictionary) MolregnoDrugIndications(mods ...qm.QueryMod) drugIndicationQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"drug_indication\".\"molregno\"=?", o.Molregno), + ) + + return DrugIndications(queryMods...) 
+} + +// MolregnoDrugMechanisms retrieves all the drug_mechanism's DrugMechanisms with an executor via molregno column. +func (o *MoleculeDictionary) MolregnoDrugMechanisms(mods ...qm.QueryMod) drugMechanismQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"drug_mechanism\".\"molregno\"=?", o.Molregno), + ) + + return DrugMechanisms(queryMods...) +} + +// MolregnoFormulations retrieves all the formulation's Formulations with an executor via molregno column. +func (o *MoleculeDictionary) MolregnoFormulations(mods ...qm.QueryMod) formulationQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"formulations\".\"molregno\"=?", o.Molregno), + ) + + return Formulations(queryMods...) +} + +// MolregnoMoleculeAtcClassifications retrieves all the molecule_atc_classification's MoleculeAtcClassifications with an executor via molregno column. +func (o *MoleculeDictionary) MolregnoMoleculeAtcClassifications(mods ...qm.QueryMod) moleculeAtcClassificationQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"molecule_atc_classification\".\"molregno\"=?", o.Molregno), + ) + + return MoleculeAtcClassifications(queryMods...) +} + +// MolregnoMoleculeFracClassifications retrieves all the molecule_frac_classification's MoleculeFracClassifications with an executor via molregno column. +func (o *MoleculeDictionary) MolregnoMoleculeFracClassifications(mods ...qm.QueryMod) moleculeFracClassificationQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"molecule_frac_classification\".\"molregno\"=?", o.Molregno), + ) + + return MoleculeFracClassifications(queryMods...) 
+} + +// ParentMolregnoMoleculeHierarchies retrieves all the molecule_hierarchy's MoleculeHierarchies with an executor via parent_molregno column. +func (o *MoleculeDictionary) ParentMolregnoMoleculeHierarchies(mods ...qm.QueryMod) moleculeHierarchyQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"molecule_hierarchy\".\"parent_molregno\"=?", o.Molregno), + ) + + return MoleculeHierarchies(queryMods...) +} + +// ActiveMolregnoMoleculeHierarchies retrieves all the molecule_hierarchy's MoleculeHierarchies with an executor via active_molregno column. +func (o *MoleculeDictionary) ActiveMolregnoMoleculeHierarchies(mods ...qm.QueryMod) moleculeHierarchyQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"molecule_hierarchy\".\"active_molregno\"=?", o.Molregno), + ) + + return MoleculeHierarchies(queryMods...) +} + +// MolregnoMoleculeHracClassifications retrieves all the molecule_hrac_classification's MoleculeHracClassifications with an executor via molregno column. +func (o *MoleculeDictionary) MolregnoMoleculeHracClassifications(mods ...qm.QueryMod) moleculeHracClassificationQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"molecule_hrac_classification\".\"molregno\"=?", o.Molregno), + ) + + return MoleculeHracClassifications(queryMods...) +} + +// MolregnoMoleculeIracClassifications retrieves all the molecule_irac_classification's MoleculeIracClassifications with an executor via molregno column. +func (o *MoleculeDictionary) MolregnoMoleculeIracClassifications(mods ...qm.QueryMod) moleculeIracClassificationQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) 
+ } + + queryMods = append(queryMods, + qm.Where("\"molecule_irac_classification\".\"molregno\"=?", o.Molregno), + ) + + return MoleculeIracClassifications(queryMods...) +} + +// MolregnoMoleculeSynonyms retrieves all the molecule_synonym's MoleculeSynonyms with an executor via molregno column. +func (o *MoleculeDictionary) MolregnoMoleculeSynonyms(mods ...qm.QueryMod) moleculeSynonymQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"molecule_synonyms\".\"molregno\"=?", o.Molregno), + ) + + return MoleculeSynonyms(queryMods...) +} + +// LoadChembl allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (moleculeDictionaryL) LoadChembl(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error { + var slice []*MoleculeDictionary + var object *MoleculeDictionary + + if singular { + object = maybeMoleculeDictionary.(*MoleculeDictionary) + } else { + slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &moleculeDictionaryR{} + } + args = append(args, object.ChemblID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &moleculeDictionaryR{} + } + + for _, a := range args { + if a == obj.ChemblID { + continue Outer + } + } + + args = append(args, obj.ChemblID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`chembl_id_lookup`), + qm.WhereIn(`chembl_id_lookup.chembl_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ChemblIDLookup") + } + + var resultSlice []*ChemblIDLookup + if err = queries.Bind(results, &resultSlice); err != nil { + return 
errors.Wrap(err, "failed to bind eager loaded slice ChemblIDLookup") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for chembl_id_lookup") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for chembl_id_lookup") + } + + if len(moleculeDictionaryAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Chembl = foreign + if foreign.R == nil { + foreign.R = &chemblIDLookupR{} + } + foreign.R.ChemblMoleculeDictionary = object + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ChemblID == foreign.ChemblID { + local.R.Chembl = foreign + if foreign.R == nil { + foreign.R = &chemblIDLookupR{} + } + foreign.R.ChemblMoleculeDictionary = local + break + } + } + } + + return nil +} + +// LoadMolregnoBiotherapeutic allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-1 relationship. 
+func (moleculeDictionaryL) LoadMolregnoBiotherapeutic(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error { + var slice []*MoleculeDictionary + var object *MoleculeDictionary + + if singular { + object = maybeMoleculeDictionary.(*MoleculeDictionary) + } else { + slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &moleculeDictionaryR{} + } + args = append(args, object.Molregno) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &moleculeDictionaryR{} + } + + for _, a := range args { + if a == obj.Molregno { + continue Outer + } + } + + args = append(args, obj.Molregno) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`biotherapeutics`), + qm.WhereIn(`biotherapeutics.molregno in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Biotherapeutic") + } + + var resultSlice []*Biotherapeutic + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Biotherapeutic") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for biotherapeutics") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for biotherapeutics") + } + + if len(moleculeDictionaryAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.MolregnoBiotherapeutic = foreign + if foreign.R == nil { + foreign.R = &biotherapeuticR{} + } + foreign.R.MolregnoMoleculeDictionary = 
object + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Molregno == foreign.Molregno { + local.R.MolregnoBiotherapeutic = foreign + if foreign.R == nil { + foreign.R = &biotherapeuticR{} + } + foreign.R.MolregnoMoleculeDictionary = local + break + } + } + } + + return nil +} + +// LoadMolregnoCompoundProperty allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-1 relationship. +func (moleculeDictionaryL) LoadMolregnoCompoundProperty(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error { + var slice []*MoleculeDictionary + var object *MoleculeDictionary + + if singular { + object = maybeMoleculeDictionary.(*MoleculeDictionary) + } else { + slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &moleculeDictionaryR{} + } + args = append(args, object.Molregno) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &moleculeDictionaryR{} + } + + for _, a := range args { + if a == obj.Molregno { + continue Outer + } + } + + args = append(args, obj.Molregno) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`compound_properties`), + qm.WhereIn(`compound_properties.molregno in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load CompoundProperty") + } + + var resultSlice []*CompoundProperty + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice CompoundProperty") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for compound_properties") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error 
occurred during iteration of eager loaded relations for compound_properties") + } + + if len(moleculeDictionaryAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.MolregnoCompoundProperty = foreign + if foreign.R == nil { + foreign.R = &compoundPropertyR{} + } + foreign.R.MolregnoMoleculeDictionary = object + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Molregno == foreign.Molregno { + local.R.MolregnoCompoundProperty = foreign + if foreign.R == nil { + foreign.R = &compoundPropertyR{} + } + foreign.R.MolregnoMoleculeDictionary = local + break + } + } + } + + return nil +} + +// LoadMolregnoCompoundStructure allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-1 relationship. +func (moleculeDictionaryL) LoadMolregnoCompoundStructure(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error { + var slice []*MoleculeDictionary + var object *MoleculeDictionary + + if singular { + object = maybeMoleculeDictionary.(*MoleculeDictionary) + } else { + slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &moleculeDictionaryR{} + } + args = append(args, object.Molregno) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &moleculeDictionaryR{} + } + + for _, a := range args { + if a == obj.Molregno { + continue Outer + } + } + + args = append(args, obj.Molregno) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`compound_structures`), + qm.WhereIn(`compound_structures.molregno in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := 
query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load CompoundStructure") + } + + var resultSlice []*CompoundStructure + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice CompoundStructure") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for compound_structures") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound_structures") + } + + if len(moleculeDictionaryAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.MolregnoCompoundStructure = foreign + if foreign.R == nil { + foreign.R = &compoundStructureR{} + } + foreign.R.MolregnoMoleculeDictionary = object + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Molregno == foreign.Molregno { + local.R.MolregnoCompoundStructure = foreign + if foreign.R == nil { + foreign.R = &compoundStructureR{} + } + foreign.R.MolregnoMoleculeDictionary = local + break + } + } + } + + return nil +} + +// LoadMolregnoMoleculeHierarchy allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-1 relationship. 
+func (moleculeDictionaryL) LoadMolregnoMoleculeHierarchy(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error { + var slice []*MoleculeDictionary + var object *MoleculeDictionary + + if singular { + object = maybeMoleculeDictionary.(*MoleculeDictionary) + } else { + slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &moleculeDictionaryR{} + } + args = append(args, object.Molregno) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &moleculeDictionaryR{} + } + + for _, a := range args { + if a == obj.Molregno { + continue Outer + } + } + + args = append(args, obj.Molregno) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`molecule_hierarchy`), + qm.WhereIn(`molecule_hierarchy.molregno in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load MoleculeHierarchy") + } + + var resultSlice []*MoleculeHierarchy + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice MoleculeHierarchy") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for molecule_hierarchy") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_hierarchy") + } + + if len(moleculeDictionaryAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.MolregnoMoleculeHierarchy = foreign + if foreign.R == nil { + foreign.R = &moleculeHierarchyR{} + } + 
foreign.R.MolregnoMoleculeDictionary = object + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Molregno == foreign.Molregno { + local.R.MolregnoMoleculeHierarchy = foreign + if foreign.R == nil { + foreign.R = &moleculeHierarchyR{} + } + foreign.R.MolregnoMoleculeDictionary = local + break + } + } + } + + return nil +} + +// LoadMolregnoActivities allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (moleculeDictionaryL) LoadMolregnoActivities(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error { + var slice []*MoleculeDictionary + var object *MoleculeDictionary + + if singular { + object = maybeMoleculeDictionary.(*MoleculeDictionary) + } else { + slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &moleculeDictionaryR{} + } + args = append(args, object.Molregno) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &moleculeDictionaryR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Molregno) { + continue Outer + } + } + + args = append(args, obj.Molregno) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`activities`), + qm.WhereIn(`activities.molregno in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load activities") + } + + var resultSlice []*Activity + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice activities") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on activities") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, 
"error occurred during iteration of eager loaded relations for activities") + } + + if len(activityAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.MolregnoActivities = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.MolregnoMoleculeDictionary = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.Molregno, foreign.Molregno) { + local.R.MolregnoActivities = append(local.R.MolregnoActivities, foreign) + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.MolregnoMoleculeDictionary = local + break + } + } + } + + return nil +} + +// LoadMolregnoCompoundRecords allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (moleculeDictionaryL) LoadMolregnoCompoundRecords(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error { + var slice []*MoleculeDictionary + var object *MoleculeDictionary + + if singular { + object = maybeMoleculeDictionary.(*MoleculeDictionary) + } else { + slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &moleculeDictionaryR{} + } + args = append(args, object.Molregno) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &moleculeDictionaryR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Molregno) { + continue Outer + } + } + + args = append(args, obj.Molregno) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`compound_records`), + qm.WhereIn(`compound_records.molregno in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := 
query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load compound_records") + } + + var resultSlice []*CompoundRecord + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice compound_records") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on compound_records") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound_records") + } + + if len(compoundRecordAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.MolregnoCompoundRecords = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.MolregnoMoleculeDictionary = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.Molregno, foreign.Molregno) { + local.R.MolregnoCompoundRecords = append(local.R.MolregnoCompoundRecords, foreign) + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.MolregnoMoleculeDictionary = local + break + } + } + } + + return nil +} + +// LoadMolregnoCompoundStructuralAlerts allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
func (moleculeDictionaryL) LoadMolregnoCompoundStructuralAlerts(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error {
	var slice []*MoleculeDictionary
	var object *MoleculeDictionary

	// The caller passes either a single *MoleculeDictionary or a *[]*MoleculeDictionary.
	if singular {
		object = maybeMoleculeDictionary.(*MoleculeDictionary)
	} else {
		slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary)
	}

	// Collect the molregno key(s) to query, initializing each R struct as needed.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeDictionaryR{}
		}
		args = append(args, object.Molregno)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeDictionaryR{}
			}

			// Skip keys already collected so each molregno is queried once.
			for _, a := range args {
				if a == obj.Molregno {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`compound_structural_alerts`),
		qm.WhereIn(`compound_structural_alerts.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load compound_structural_alerts")
	}

	var resultSlice []*CompoundStructuralAlert
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice compound_structural_alerts")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on compound_structural_alerts")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound_structural_alerts")
	}

	// Run any registered after-select hooks on the freshly loaded rows.
	if len(compoundStructuralAlertAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	// Singular case: attach every result to the one parent and back-link it.
	if singular {
		object.R.MolregnoCompoundStructuralAlerts = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &compoundStructuralAlertR{}
			}
			foreign.R.MolregnoMoleculeDictionary = object
		}
		return nil
	}

	// Plural case: match each loaded row back to its parent by molregno.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.Molregno == foreign.Molregno {
				local.R.MolregnoCompoundStructuralAlerts = append(local.R.MolregnoCompoundStructuralAlerts, foreign)
				if foreign.R == nil {
					foreign.R = &compoundStructuralAlertR{}
				}
				foreign.R.MolregnoMoleculeDictionary = local
				break
			}
		}
	}

	return nil
}

// LoadMolregnoDrugIndications allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (moleculeDictionaryL) LoadMolregnoDrugIndications(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error {
	var slice []*MoleculeDictionary
	var object *MoleculeDictionary

	// The caller passes either a single *MoleculeDictionary or a *[]*MoleculeDictionary.
	if singular {
		object = maybeMoleculeDictionary.(*MoleculeDictionary)
	} else {
		slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary)
	}

	// Collect the molregno key(s) to query, initializing each R struct as needed.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeDictionaryR{}
		}
		args = append(args, object.Molregno)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeDictionaryR{}
			}

			// queries.Equal performs a null-aware comparison for this key type.
			for _, a := range args {
				if queries.Equal(a, obj.Molregno) {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`drug_indication`),
		qm.WhereIn(`drug_indication.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load drug_indication")
	}

	var resultSlice []*DrugIndication
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice drug_indication")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on drug_indication")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for drug_indication")
	}

	// Run any registered after-select hooks on the freshly loaded rows.
	if len(drugIndicationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	// Singular case: attach every result to the one parent and back-link it.
	if singular {
		object.R.MolregnoDrugIndications = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &drugIndicationR{}
			}
			foreign.R.MolregnoMoleculeDictionary = object
		}
		return nil
	}

	// Plural case: match each loaded row back to its parent by molregno.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.Molregno, foreign.Molregno) {
				local.R.MolregnoDrugIndications = append(local.R.MolregnoDrugIndications, foreign)
				if foreign.R == nil {
					foreign.R = &drugIndicationR{}
				}
				foreign.R.MolregnoMoleculeDictionary = local
				break
			}
		}
	}

	return nil
}

// LoadMolregnoDrugMechanisms allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (moleculeDictionaryL) LoadMolregnoDrugMechanisms(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error {
	var slice []*MoleculeDictionary
	var object *MoleculeDictionary

	// The caller passes either a single *MoleculeDictionary or a *[]*MoleculeDictionary.
	if singular {
		object = maybeMoleculeDictionary.(*MoleculeDictionary)
	} else {
		slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary)
	}

	// Collect the molregno key(s) to query, initializing each R struct as needed.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeDictionaryR{}
		}
		args = append(args, object.Molregno)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeDictionaryR{}
			}

			// queries.Equal performs a null-aware comparison for this key type.
			for _, a := range args {
				if queries.Equal(a, obj.Molregno) {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`drug_mechanism`),
		qm.WhereIn(`drug_mechanism.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load drug_mechanism")
	}

	var resultSlice []*DrugMechanism
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice drug_mechanism")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on drug_mechanism")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for drug_mechanism")
	}

	// Run any registered after-select hooks on the freshly loaded rows.
	if len(drugMechanismAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	// Singular case: attach every result to the one parent and back-link it.
	if singular {
		object.R.MolregnoDrugMechanisms = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &drugMechanismR{}
			}
			foreign.R.MolregnoMoleculeDictionary = object
		}
		return nil
	}

	// Plural case: match each loaded row back to its parent by molregno.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.Molregno, foreign.Molregno) {
				local.R.MolregnoDrugMechanisms = append(local.R.MolregnoDrugMechanisms, foreign)
				if foreign.R == nil {
					foreign.R = &drugMechanismR{}
				}
				foreign.R.MolregnoMoleculeDictionary = local
				break
			}
		}
	}

	return nil
}

// LoadMolregnoFormulations allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (moleculeDictionaryL) LoadMolregnoFormulations(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error {
	var slice []*MoleculeDictionary
	var object *MoleculeDictionary

	// The caller passes either a single *MoleculeDictionary or a *[]*MoleculeDictionary.
	if singular {
		object = maybeMoleculeDictionary.(*MoleculeDictionary)
	} else {
		slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary)
	}

	// Collect the molregno key(s) to query, initializing each R struct as needed.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeDictionaryR{}
		}
		args = append(args, object.Molregno)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeDictionaryR{}
			}

			// queries.Equal performs a null-aware comparison for this key type.
			for _, a := range args {
				if queries.Equal(a, obj.Molregno) {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`formulations`),
		qm.WhereIn(`formulations.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load formulations")
	}

	var resultSlice []*Formulation
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice formulations")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on formulations")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for formulations")
	}

	// Run any registered after-select hooks on the freshly loaded rows.
	if len(formulationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	// Singular case: attach every result to the one parent and back-link it.
	if singular {
		object.R.MolregnoFormulations = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &formulationR{}
			}
			foreign.R.MolregnoMoleculeDictionary = object
		}
		return nil
	}

	// Plural case: match each loaded row back to its parent by molregno.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.Molregno, foreign.Molregno) {
				local.R.MolregnoFormulations = append(local.R.MolregnoFormulations, foreign)
				if foreign.R == nil {
					foreign.R = &formulationR{}
				}
				foreign.R.MolregnoMoleculeDictionary = local
				break
			}
		}
	}

	return nil
}

// LoadMolregnoMoleculeAtcClassifications allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (moleculeDictionaryL) LoadMolregnoMoleculeAtcClassifications(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error {
	var slice []*MoleculeDictionary
	var object *MoleculeDictionary

	// The caller passes either a single *MoleculeDictionary or a *[]*MoleculeDictionary.
	if singular {
		object = maybeMoleculeDictionary.(*MoleculeDictionary)
	} else {
		slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary)
	}

	// Collect the molregno key(s) to query, initializing each R struct as needed.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeDictionaryR{}
		}
		args = append(args, object.Molregno)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeDictionaryR{}
			}

			// Skip keys already collected so each molregno is queried once.
			for _, a := range args {
				if a == obj.Molregno {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_atc_classification`),
		qm.WhereIn(`molecule_atc_classification.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load molecule_atc_classification")
	}

	var resultSlice []*MoleculeAtcClassification
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice molecule_atc_classification")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on molecule_atc_classification")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_atc_classification")
	}

	// Run any registered after-select hooks on the freshly loaded rows.
	if len(moleculeAtcClassificationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	// Singular case: attach every result to the one parent and back-link it.
	if singular {
		object.R.MolregnoMoleculeAtcClassifications = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &moleculeAtcClassificationR{}
			}
			foreign.R.MolregnoMoleculeDictionary = object
		}
		return nil
	}

	// Plural case: match each loaded row back to its parent by molregno.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.Molregno == foreign.Molregno {
				local.R.MolregnoMoleculeAtcClassifications = append(local.R.MolregnoMoleculeAtcClassifications, foreign)
				if foreign.R == nil {
					foreign.R = &moleculeAtcClassificationR{}
				}
				foreign.R.MolregnoMoleculeDictionary = local
				break
			}
		}
	}

	return nil
}

// LoadMolregnoMoleculeFracClassifications allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (moleculeDictionaryL) LoadMolregnoMoleculeFracClassifications(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error {
	var slice []*MoleculeDictionary
	var object *MoleculeDictionary

	// The caller passes either a single *MoleculeDictionary or a *[]*MoleculeDictionary.
	if singular {
		object = maybeMoleculeDictionary.(*MoleculeDictionary)
	} else {
		slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary)
	}

	// Collect the molregno key(s) to query, initializing each R struct as needed.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeDictionaryR{}
		}
		args = append(args, object.Molregno)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeDictionaryR{}
			}

			// Skip keys already collected so each molregno is queried once.
			for _, a := range args {
				if a == obj.Molregno {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_frac_classification`),
		qm.WhereIn(`molecule_frac_classification.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load molecule_frac_classification")
	}

	var resultSlice []*MoleculeFracClassification
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice molecule_frac_classification")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on molecule_frac_classification")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_frac_classification")
	}

	// Run any registered after-select hooks on the freshly loaded rows.
	if len(moleculeFracClassificationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	// Singular case: attach every result to the one parent and back-link it.
	if singular {
		object.R.MolregnoMoleculeFracClassifications = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &moleculeFracClassificationR{}
			}
			foreign.R.MolregnoMoleculeDictionary = object
		}
		return nil
	}

	// Plural case: match each loaded row back to its parent by molregno.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.Molregno == foreign.Molregno {
				local.R.MolregnoMoleculeFracClassifications = append(local.R.MolregnoMoleculeFracClassifications, foreign)
				if foreign.R == nil {
					foreign.R = &moleculeFracClassificationR{}
				}
				foreign.R.MolregnoMoleculeDictionary = local
				break
			}
		}
	}

	return nil
}

// LoadParentMolregnoMoleculeHierarchies allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (moleculeDictionaryL) LoadParentMolregnoMoleculeHierarchies(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error {
	var slice []*MoleculeDictionary
	var object *MoleculeDictionary

	// The caller passes either a single *MoleculeDictionary or a *[]*MoleculeDictionary.
	if singular {
		object = maybeMoleculeDictionary.(*MoleculeDictionary)
	} else {
		slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary)
	}

	// Collect the molregno key(s) to query, initializing each R struct as needed.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeDictionaryR{}
		}
		args = append(args, object.Molregno)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeDictionaryR{}
			}

			// queries.Equal performs a null-aware comparison for this key type.
			for _, a := range args {
				if queries.Equal(a, obj.Molregno) {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)
		}
	}

	if len(args) == 0 {
		return nil
	}

	// This relation joins on molecule_hierarchy.parent_molregno (not molregno).
	query := NewQuery(
		qm.From(`molecule_hierarchy`),
		qm.WhereIn(`molecule_hierarchy.parent_molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load molecule_hierarchy")
	}

	var resultSlice []*MoleculeHierarchy
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice molecule_hierarchy")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on molecule_hierarchy")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_hierarchy")
	}

	// Run any registered after-select hooks on the freshly loaded rows.
	if len(moleculeHierarchyAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	// Singular case: attach every result to the one parent and back-link it.
	if singular {
		object.R.ParentMolregnoMoleculeHierarchies = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &moleculeHierarchyR{}
			}
			foreign.R.ParentMolregnoMoleculeDictionary = object
		}
		return nil
	}

	// Plural case: match each loaded row back to its parent via parent_molregno.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.Molregno, foreign.ParentMolregno) {
				local.R.ParentMolregnoMoleculeHierarchies = append(local.R.ParentMolregnoMoleculeHierarchies, foreign)
				if foreign.R == nil {
					foreign.R = &moleculeHierarchyR{}
				}
				foreign.R.ParentMolregnoMoleculeDictionary = local
				break
			}
		}
	}

	return nil
}

// LoadActiveMolregnoMoleculeHierarchies allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (moleculeDictionaryL) LoadActiveMolregnoMoleculeHierarchies(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error {
	var slice []*MoleculeDictionary
	var object *MoleculeDictionary

	// The caller passes either a single *MoleculeDictionary or a *[]*MoleculeDictionary.
	if singular {
		object = maybeMoleculeDictionary.(*MoleculeDictionary)
	} else {
		slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary)
	}

	// Collect the molregno key(s) to query, initializing each R struct as needed.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeDictionaryR{}
		}
		args = append(args, object.Molregno)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeDictionaryR{}
			}

			// queries.Equal performs a null-aware comparison for this key type.
			for _, a := range args {
				if queries.Equal(a, obj.Molregno) {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)
		}
	}

	if len(args) == 0 {
		return nil
	}

	// This relation joins on molecule_hierarchy.active_molregno (not molregno).
	query := NewQuery(
		qm.From(`molecule_hierarchy`),
		qm.WhereIn(`molecule_hierarchy.active_molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load molecule_hierarchy")
	}

	var resultSlice []*MoleculeHierarchy
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice molecule_hierarchy")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on molecule_hierarchy")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_hierarchy")
	}

	// Run any registered after-select hooks on the freshly loaded rows.
	if len(moleculeHierarchyAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	// Singular case: attach every result to the one parent and back-link it.
	if singular {
		object.R.ActiveMolregnoMoleculeHierarchies = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &moleculeHierarchyR{}
			}
			foreign.R.ActiveMolregnoMoleculeDictionary = object
		}
		return nil
	}

	// Plural case: match each loaded row back to its parent via active_molregno.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.Molregno, foreign.ActiveMolregno) {
				local.R.ActiveMolregnoMoleculeHierarchies = append(local.R.ActiveMolregnoMoleculeHierarchies, foreign)
				if foreign.R == nil {
					foreign.R = &moleculeHierarchyR{}
				}
				foreign.R.ActiveMolregnoMoleculeDictionary = local
				break
			}
		}
	}

	return nil
}

// LoadMolregnoMoleculeHracClassifications allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (moleculeDictionaryL) LoadMolregnoMoleculeHracClassifications(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error {
	var slice []*MoleculeDictionary
	var object *MoleculeDictionary

	// The caller passes either a single *MoleculeDictionary or a *[]*MoleculeDictionary.
	if singular {
		object = maybeMoleculeDictionary.(*MoleculeDictionary)
	} else {
		slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary)
	}

	// Collect the molregno key(s) to query, initializing each R struct as needed.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeDictionaryR{}
		}
		args = append(args, object.Molregno)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeDictionaryR{}
			}

			// Skip keys already collected so each molregno is queried once.
			for _, a := range args {
				if a == obj.Molregno {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_hrac_classification`),
		qm.WhereIn(`molecule_hrac_classification.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load molecule_hrac_classification")
	}

	var resultSlice []*MoleculeHracClassification
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice molecule_hrac_classification")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on molecule_hrac_classification")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_hrac_classification")
	}

	// Run any registered after-select hooks on the freshly loaded rows.
	if len(moleculeHracClassificationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	// Singular case: attach every result to the one parent and back-link it.
	if singular {
		object.R.MolregnoMoleculeHracClassifications = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &moleculeHracClassificationR{}
			}
			foreign.R.MolregnoMoleculeDictionary = object
		}
		return nil
	}

	// Plural case: match each loaded row back to its parent by molregno.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.Molregno == foreign.Molregno {
				local.R.MolregnoMoleculeHracClassifications = append(local.R.MolregnoMoleculeHracClassifications, foreign)
				if foreign.R == nil {
					foreign.R = &moleculeHracClassificationR{}
				}
				foreign.R.MolregnoMoleculeDictionary = local
				break
			}
		}
	}

	return nil
}

// LoadMolregnoMoleculeIracClassifications allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (moleculeDictionaryL) LoadMolregnoMoleculeIracClassifications(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error {
	var slice []*MoleculeDictionary
	var object *MoleculeDictionary

	// The caller passes either a single *MoleculeDictionary or a *[]*MoleculeDictionary.
	if singular {
		object = maybeMoleculeDictionary.(*MoleculeDictionary)
	} else {
		slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary)
	}

	// Collect the molregno key(s) to query, initializing each R struct as needed.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeDictionaryR{}
		}
		args = append(args, object.Molregno)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeDictionaryR{}
			}

			// Skip keys already collected so each molregno is queried once.
			for _, a := range args {
				if a == obj.Molregno {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_irac_classification`),
		qm.WhereIn(`molecule_irac_classification.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load molecule_irac_classification")
	}

	var resultSlice []*MoleculeIracClassification
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice molecule_irac_classification")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on molecule_irac_classification")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_irac_classification")
	}

	// Run any registered after-select hooks on the freshly loaded rows.
	if len(moleculeIracClassificationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	// Singular case: attach every result to the one parent and back-link it.
	if singular {
		object.R.MolregnoMoleculeIracClassifications = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &moleculeIracClassificationR{}
			}
			foreign.R.MolregnoMoleculeDictionary = object
		}
		return nil
	}

	// Plural case: match each loaded row back to its parent by molregno.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.Molregno == foreign.Molregno {
				local.R.MolregnoMoleculeIracClassifications = append(local.R.MolregnoMoleculeIracClassifications, foreign)
				if foreign.R == nil {
					foreign.R = &moleculeIracClassificationR{}
				}
				foreign.R.MolregnoMoleculeDictionary = local
				break
			}
		}
	}

	return nil
}

// LoadMolregnoMoleculeSynonyms allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (moleculeDictionaryL) LoadMolregnoMoleculeSynonyms(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeDictionary interface{}, mods queries.Applicator) error {
	var slice []*MoleculeDictionary
	var object *MoleculeDictionary

	// The caller passes either a single *MoleculeDictionary or a *[]*MoleculeDictionary.
	if singular {
		object = maybeMoleculeDictionary.(*MoleculeDictionary)
	} else {
		slice = *maybeMoleculeDictionary.(*[]*MoleculeDictionary)
	}

	// Collect the molregno key(s) to query, initializing each R struct as needed.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeDictionaryR{}
		}
		args = append(args, object.Molregno)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeDictionaryR{}
			}

			// Skip keys already collected so each molregno is queried once.
			for _, a := range args {
				if a == obj.Molregno {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_synonyms`),
		qm.WhereIn(`molecule_synonyms.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load molecule_synonyms")
	}

	var resultSlice []*MoleculeSynonym
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice molecule_synonyms")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on molecule_synonyms")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_synonyms")
	}

	// Run any registered after-select hooks on the freshly loaded rows.
	if len(moleculeSynonymAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	// Singular case: attach every result to the one parent and back-link it.
	if singular {
		object.R.MolregnoMoleculeSynonyms = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &moleculeSynonymR{}
			}
			foreign.R.MolregnoMoleculeDictionary = object
		}
		return nil
	}

	// Plural case: match each loaded row back to its parent by molregno.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.Molregno == foreign.Molregno {
				local.R.MolregnoMoleculeSynonyms = append(local.R.MolregnoMoleculeSynonyms, foreign)
				if foreign.R == nil {
					foreign.R = &moleculeSynonymR{}
				}
				foreign.R.MolregnoMoleculeDictionary = local
				break
			}
		}
	}

	return nil
}

// SetChembl of the moleculeDictionary to the related item.
// Sets o.R.Chembl to related.
// Adds o to related.R.ChemblMoleculeDictionary.
func (o *MoleculeDictionary) SetChembl(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ChemblIDLookup) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Unlike the 1-1 setters below, the FK lives on the LOCAL table here,
	// so molecule_dictionary.chembl_id is updated to point at related.
	updateQuery := fmt.Sprintf(
		"UPDATE \"molecule_dictionary\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"chembl_id"}),
		strmangle.WhereClause("\"", "\"", 0, moleculeDictionaryPrimaryKeyColumns),
	)
	values := []interface{}{related.ChemblID, o.Molregno}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Keep the in-memory struct in sync with the database row just written.
	o.ChemblID = related.ChemblID
	if o.R == nil {
		o.R = &moleculeDictionaryR{
			Chembl: related,
		}
	} else {
		o.R.Chembl = related
	}

	if related.R == nil {
		related.R = &chemblIDLookupR{
			ChemblMoleculeDictionary: o,
		}
	} else {
		related.R.ChemblMoleculeDictionary = o
	}

	return nil
}

// SetMolregnoBiotherapeutic of the moleculeDictionary to the related item.
// Sets o.R.MolregnoBiotherapeutic to related.
// Adds o to related.R.MolregnoMoleculeDictionary.
+func (o *MoleculeDictionary) SetMolregnoBiotherapeutic(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Biotherapeutic) error { + var err error + + if insert { + related.Molregno = o.Molregno + + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"biotherapeutics\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, biotherapeuticPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, related.Molregno} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + related.Molregno = o.Molregno + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoBiotherapeutic: related, + } + } else { + o.R.MolregnoBiotherapeutic = related + } + + if related.R == nil { + related.R = &biotherapeuticR{ + MolregnoMoleculeDictionary: o, + } + } else { + related.R.MolregnoMoleculeDictionary = o + } + return nil +} + +// SetMolregnoCompoundProperty of the moleculeDictionary to the related item. +// Sets o.R.MolregnoCompoundProperty to related. +// Adds o to related.R.MolregnoMoleculeDictionary. 
+func (o *MoleculeDictionary) SetMolregnoCompoundProperty(ctx context.Context, exec boil.ContextExecutor, insert bool, related *CompoundProperty) error { + var err error + + if insert { + related.Molregno = o.Molregno + + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"compound_properties\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, compoundPropertyPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, related.Molregno} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + related.Molregno = o.Molregno + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoCompoundProperty: related, + } + } else { + o.R.MolregnoCompoundProperty = related + } + + if related.R == nil { + related.R = &compoundPropertyR{ + MolregnoMoleculeDictionary: o, + } + } else { + related.R.MolregnoMoleculeDictionary = o + } + return nil +} + +// SetMolregnoCompoundStructure of the moleculeDictionary to the related item. +// Sets o.R.MolregnoCompoundStructure to related. +// Adds o to related.R.MolregnoMoleculeDictionary. 
+func (o *MoleculeDictionary) SetMolregnoCompoundStructure(ctx context.Context, exec boil.ContextExecutor, insert bool, related *CompoundStructure) error { + var err error + + if insert { + related.Molregno = o.Molregno + + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"compound_structures\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, compoundStructurePrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, related.Molregno} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + related.Molregno = o.Molregno + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoCompoundStructure: related, + } + } else { + o.R.MolregnoCompoundStructure = related + } + + if related.R == nil { + related.R = &compoundStructureR{ + MolregnoMoleculeDictionary: o, + } + } else { + related.R.MolregnoMoleculeDictionary = o + } + return nil +} + +// SetMolregnoMoleculeHierarchy of the moleculeDictionary to the related item. +// Sets o.R.MolregnoMoleculeHierarchy to related. +// Adds o to related.R.MolregnoMoleculeDictionary. 
+func (o *MoleculeDictionary) SetMolregnoMoleculeHierarchy(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeHierarchy) error { + var err error + + if insert { + related.Molregno = o.Molregno + + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_hierarchy\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, moleculeHierarchyPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, related.Molregno} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + related.Molregno = o.Molregno + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoMoleculeHierarchy: related, + } + } else { + o.R.MolregnoMoleculeHierarchy = related + } + + if related.R == nil { + related.R = &moleculeHierarchyR{ + MolregnoMoleculeDictionary: o, + } + } else { + related.R.MolregnoMoleculeDictionary = o + } + return nil +} + +// AddMolregnoActivities adds the given related objects to the existing relationships +// of the molecule_dictionary, optionally inserting them as new records. +// Appends related to o.R.MolregnoActivities. +// Sets related.R.MolregnoMoleculeDictionary appropriately. 
+func (o *MoleculeDictionary) AddMolregnoActivities(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Activity) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.Molregno, o.Molregno) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"activities\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, rel.ActivityID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.Molregno, o.Molregno) + } + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoActivities: related, + } + } else { + o.R.MolregnoActivities = append(o.R.MolregnoActivities, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &activityR{ + MolregnoMoleculeDictionary: o, + } + } else { + rel.R.MolregnoMoleculeDictionary = o + } + } + return nil +} + +// SetMolregnoActivities removes all previously related items of the +// molecule_dictionary replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.MolregnoMoleculeDictionary's MolregnoActivities accordingly. +// Replaces o.R.MolregnoActivities with related. +// Sets related.R.MolregnoMoleculeDictionary's MolregnoActivities accordingly. +func (o *MoleculeDictionary) SetMolregnoActivities(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Activity) error { + query := "update \"activities\" set \"molregno\" = null where \"molregno\" = ?" 
+ values := []interface{}{o.Molregno} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.MolregnoActivities { + queries.SetScanner(&rel.Molregno, nil) + if rel.R == nil { + continue + } + + rel.R.MolregnoMoleculeDictionary = nil + } + o.R.MolregnoActivities = nil + } + + return o.AddMolregnoActivities(ctx, exec, insert, related...) +} + +// RemoveMolregnoActivities relationships from objects passed in. +// Removes related items from R.MolregnoActivities (uses pointer comparison, removal does not keep order) +// Sets related.R.MolregnoMoleculeDictionary. +func (o *MoleculeDictionary) RemoveMolregnoActivities(ctx context.Context, exec boil.ContextExecutor, related ...*Activity) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.Molregno, nil) + if rel.R != nil { + rel.R.MolregnoMoleculeDictionary = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("molregno")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.MolregnoActivities { + if rel != ri { + continue + } + + ln := len(o.R.MolregnoActivities) + if ln > 1 && i < ln-1 { + o.R.MolregnoActivities[i] = o.R.MolregnoActivities[ln-1] + } + o.R.MolregnoActivities = o.R.MolregnoActivities[:ln-1] + break + } + } + + return nil +} + +// AddMolregnoCompoundRecords adds the given related objects to the existing relationships +// of the molecule_dictionary, optionally inserting them as new records. +// Appends related to o.R.MolregnoCompoundRecords. +// Sets related.R.MolregnoMoleculeDictionary appropriately. 
+func (o *MoleculeDictionary) AddMolregnoCompoundRecords(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*CompoundRecord) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.Molregno, o.Molregno) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"compound_records\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, compoundRecordPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, rel.RecordID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.Molregno, o.Molregno) + } + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoCompoundRecords: related, + } + } else { + o.R.MolregnoCompoundRecords = append(o.R.MolregnoCompoundRecords, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &compoundRecordR{ + MolregnoMoleculeDictionary: o, + } + } else { + rel.R.MolregnoMoleculeDictionary = o + } + } + return nil +} + +// SetMolregnoCompoundRecords removes all previously related items of the +// molecule_dictionary replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.MolregnoMoleculeDictionary's MolregnoCompoundRecords accordingly. +// Replaces o.R.MolregnoCompoundRecords with related. +// Sets related.R.MolregnoMoleculeDictionary's MolregnoCompoundRecords accordingly. 
+func (o *MoleculeDictionary) SetMolregnoCompoundRecords(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*CompoundRecord) error { + query := "update \"compound_records\" set \"molregno\" = null where \"molregno\" = ?" + values := []interface{}{o.Molregno} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.MolregnoCompoundRecords { + queries.SetScanner(&rel.Molregno, nil) + if rel.R == nil { + continue + } + + rel.R.MolregnoMoleculeDictionary = nil + } + o.R.MolregnoCompoundRecords = nil + } + + return o.AddMolregnoCompoundRecords(ctx, exec, insert, related...) +} + +// RemoveMolregnoCompoundRecords relationships from objects passed in. +// Removes related items from R.MolregnoCompoundRecords (uses pointer comparison, removal does not keep order) +// Sets related.R.MolregnoMoleculeDictionary. 
+func (o *MoleculeDictionary) RemoveMolregnoCompoundRecords(ctx context.Context, exec boil.ContextExecutor, related ...*CompoundRecord) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.Molregno, nil) + if rel.R != nil { + rel.R.MolregnoMoleculeDictionary = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("molregno")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.MolregnoCompoundRecords { + if rel != ri { + continue + } + + ln := len(o.R.MolregnoCompoundRecords) + if ln > 1 && i < ln-1 { + o.R.MolregnoCompoundRecords[i] = o.R.MolregnoCompoundRecords[ln-1] + } + o.R.MolregnoCompoundRecords = o.R.MolregnoCompoundRecords[:ln-1] + break + } + } + + return nil +} + +// AddMolregnoCompoundStructuralAlerts adds the given related objects to the existing relationships +// of the molecule_dictionary, optionally inserting them as new records. +// Appends related to o.R.MolregnoCompoundStructuralAlerts. +// Sets related.R.MolregnoMoleculeDictionary appropriately. 
+func (o *MoleculeDictionary) AddMolregnoCompoundStructuralAlerts(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*CompoundStructuralAlert) error { + var err error + for _, rel := range related { + if insert { + rel.Molregno = o.Molregno + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"compound_structural_alerts\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, compoundStructuralAlertPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, rel.CPDSTRAlertID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.Molregno = o.Molregno + } + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoCompoundStructuralAlerts: related, + } + } else { + o.R.MolregnoCompoundStructuralAlerts = append(o.R.MolregnoCompoundStructuralAlerts, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &compoundStructuralAlertR{ + MolregnoMoleculeDictionary: o, + } + } else { + rel.R.MolregnoMoleculeDictionary = o + } + } + return nil +} + +// AddMolregnoDrugIndications adds the given related objects to the existing relationships +// of the molecule_dictionary, optionally inserting them as new records. +// Appends related to o.R.MolregnoDrugIndications. +// Sets related.R.MolregnoMoleculeDictionary appropriately. 
+func (o *MoleculeDictionary) AddMolregnoDrugIndications(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugIndication) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.Molregno, o.Molregno) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"drug_indication\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, drugIndicationPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, rel.DrugindID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.Molregno, o.Molregno) + } + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoDrugIndications: related, + } + } else { + o.R.MolregnoDrugIndications = append(o.R.MolregnoDrugIndications, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &drugIndicationR{ + MolregnoMoleculeDictionary: o, + } + } else { + rel.R.MolregnoMoleculeDictionary = o + } + } + return nil +} + +// SetMolregnoDrugIndications removes all previously related items of the +// molecule_dictionary replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.MolregnoMoleculeDictionary's MolregnoDrugIndications accordingly. +// Replaces o.R.MolregnoDrugIndications with related. +// Sets related.R.MolregnoMoleculeDictionary's MolregnoDrugIndications accordingly. 
+func (o *MoleculeDictionary) SetMolregnoDrugIndications(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugIndication) error { + query := "update \"drug_indication\" set \"molregno\" = null where \"molregno\" = ?" + values := []interface{}{o.Molregno} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.MolregnoDrugIndications { + queries.SetScanner(&rel.Molregno, nil) + if rel.R == nil { + continue + } + + rel.R.MolregnoMoleculeDictionary = nil + } + o.R.MolregnoDrugIndications = nil + } + + return o.AddMolregnoDrugIndications(ctx, exec, insert, related...) +} + +// RemoveMolregnoDrugIndications relationships from objects passed in. +// Removes related items from R.MolregnoDrugIndications (uses pointer comparison, removal does not keep order) +// Sets related.R.MolregnoMoleculeDictionary. 
+func (o *MoleculeDictionary) RemoveMolregnoDrugIndications(ctx context.Context, exec boil.ContextExecutor, related ...*DrugIndication) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.Molregno, nil) + if rel.R != nil { + rel.R.MolregnoMoleculeDictionary = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("molregno")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.MolregnoDrugIndications { + if rel != ri { + continue + } + + ln := len(o.R.MolregnoDrugIndications) + if ln > 1 && i < ln-1 { + o.R.MolregnoDrugIndications[i] = o.R.MolregnoDrugIndications[ln-1] + } + o.R.MolregnoDrugIndications = o.R.MolregnoDrugIndications[:ln-1] + break + } + } + + return nil +} + +// AddMolregnoDrugMechanisms adds the given related objects to the existing relationships +// of the molecule_dictionary, optionally inserting them as new records. +// Appends related to o.R.MolregnoDrugMechanisms. +// Sets related.R.MolregnoMoleculeDictionary appropriately. 
+func (o *MoleculeDictionary) AddMolregnoDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugMechanism) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.Molregno, o.Molregno) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"drug_mechanism\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, drugMechanismPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, rel.MecID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.Molregno, o.Molregno) + } + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoDrugMechanisms: related, + } + } else { + o.R.MolregnoDrugMechanisms = append(o.R.MolregnoDrugMechanisms, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &drugMechanismR{ + MolregnoMoleculeDictionary: o, + } + } else { + rel.R.MolregnoMoleculeDictionary = o + } + } + return nil +} + +// SetMolregnoDrugMechanisms removes all previously related items of the +// molecule_dictionary replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.MolregnoMoleculeDictionary's MolregnoDrugMechanisms accordingly. +// Replaces o.R.MolregnoDrugMechanisms with related. +// Sets related.R.MolregnoMoleculeDictionary's MolregnoDrugMechanisms accordingly. 
+func (o *MoleculeDictionary) SetMolregnoDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugMechanism) error { + query := "update \"drug_mechanism\" set \"molregno\" = null where \"molregno\" = ?" + values := []interface{}{o.Molregno} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.MolregnoDrugMechanisms { + queries.SetScanner(&rel.Molregno, nil) + if rel.R == nil { + continue + } + + rel.R.MolregnoMoleculeDictionary = nil + } + o.R.MolregnoDrugMechanisms = nil + } + + return o.AddMolregnoDrugMechanisms(ctx, exec, insert, related...) +} + +// RemoveMolregnoDrugMechanisms relationships from objects passed in. +// Removes related items from R.MolregnoDrugMechanisms (uses pointer comparison, removal does not keep order) +// Sets related.R.MolregnoMoleculeDictionary. 
+func (o *MoleculeDictionary) RemoveMolregnoDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, related ...*DrugMechanism) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.Molregno, nil) + if rel.R != nil { + rel.R.MolregnoMoleculeDictionary = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("molregno")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.MolregnoDrugMechanisms { + if rel != ri { + continue + } + + ln := len(o.R.MolregnoDrugMechanisms) + if ln > 1 && i < ln-1 { + o.R.MolregnoDrugMechanisms[i] = o.R.MolregnoDrugMechanisms[ln-1] + } + o.R.MolregnoDrugMechanisms = o.R.MolregnoDrugMechanisms[:ln-1] + break + } + } + + return nil +} + +// AddMolregnoFormulations adds the given related objects to the existing relationships +// of the molecule_dictionary, optionally inserting them as new records. +// Appends related to o.R.MolregnoFormulations. +// Sets related.R.MolregnoMoleculeDictionary appropriately. 
+func (o *MoleculeDictionary) AddMolregnoFormulations(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Formulation) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.Molregno, o.Molregno) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"formulations\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, formulationPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, rel.FormulationID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.Molregno, o.Molregno) + } + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoFormulations: related, + } + } else { + o.R.MolregnoFormulations = append(o.R.MolregnoFormulations, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &formulationR{ + MolregnoMoleculeDictionary: o, + } + } else { + rel.R.MolregnoMoleculeDictionary = o + } + } + return nil +} + +// SetMolregnoFormulations removes all previously related items of the +// molecule_dictionary replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.MolregnoMoleculeDictionary's MolregnoFormulations accordingly. +// Replaces o.R.MolregnoFormulations with related. +// Sets related.R.MolregnoMoleculeDictionary's MolregnoFormulations accordingly. 
+func (o *MoleculeDictionary) SetMolregnoFormulations(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Formulation) error { + query := "update \"formulations\" set \"molregno\" = null where \"molregno\" = ?" + values := []interface{}{o.Molregno} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.MolregnoFormulations { + queries.SetScanner(&rel.Molregno, nil) + if rel.R == nil { + continue + } + + rel.R.MolregnoMoleculeDictionary = nil + } + o.R.MolregnoFormulations = nil + } + + return o.AddMolregnoFormulations(ctx, exec, insert, related...) +} + +// RemoveMolregnoFormulations relationships from objects passed in. +// Removes related items from R.MolregnoFormulations (uses pointer comparison, removal does not keep order) +// Sets related.R.MolregnoMoleculeDictionary. 
+func (o *MoleculeDictionary) RemoveMolregnoFormulations(ctx context.Context, exec boil.ContextExecutor, related ...*Formulation) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.Molregno, nil) + if rel.R != nil { + rel.R.MolregnoMoleculeDictionary = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("molregno")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.MolregnoFormulations { + if rel != ri { + continue + } + + ln := len(o.R.MolregnoFormulations) + if ln > 1 && i < ln-1 { + o.R.MolregnoFormulations[i] = o.R.MolregnoFormulations[ln-1] + } + o.R.MolregnoFormulations = o.R.MolregnoFormulations[:ln-1] + break + } + } + + return nil +} + +// AddMolregnoMoleculeAtcClassifications adds the given related objects to the existing relationships +// of the molecule_dictionary, optionally inserting them as new records. +// Appends related to o.R.MolregnoMoleculeAtcClassifications. +// Sets related.R.MolregnoMoleculeDictionary appropriately. 
+func (o *MoleculeDictionary) AddMolregnoMoleculeAtcClassifications(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeAtcClassification) error { + var err error + for _, rel := range related { + if insert { + rel.Molregno = o.Molregno + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_atc_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, moleculeAtcClassificationPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, rel.MolAtcID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.Molregno = o.Molregno + } + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoMoleculeAtcClassifications: related, + } + } else { + o.R.MolregnoMoleculeAtcClassifications = append(o.R.MolregnoMoleculeAtcClassifications, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &moleculeAtcClassificationR{ + MolregnoMoleculeDictionary: o, + } + } else { + rel.R.MolregnoMoleculeDictionary = o + } + } + return nil +} + +// AddMolregnoMoleculeFracClassifications adds the given related objects to the existing relationships +// of the molecule_dictionary, optionally inserting them as new records. +// Appends related to o.R.MolregnoMoleculeFracClassifications. +// Sets related.R.MolregnoMoleculeDictionary appropriately. 
+func (o *MoleculeDictionary) AddMolregnoMoleculeFracClassifications(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeFracClassification) error { + var err error + for _, rel := range related { + if insert { + rel.Molregno = o.Molregno + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_frac_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, moleculeFracClassificationPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, rel.MolFracID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.Molregno = o.Molregno + } + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoMoleculeFracClassifications: related, + } + } else { + o.R.MolregnoMoleculeFracClassifications = append(o.R.MolregnoMoleculeFracClassifications, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &moleculeFracClassificationR{ + MolregnoMoleculeDictionary: o, + } + } else { + rel.R.MolregnoMoleculeDictionary = o + } + } + return nil +} + +// AddParentMolregnoMoleculeHierarchies adds the given related objects to the existing relationships +// of the molecule_dictionary, optionally inserting them as new records. +// Appends related to o.R.ParentMolregnoMoleculeHierarchies. +// Sets related.R.ParentMolregnoMoleculeDictionary appropriately. 
+func (o *MoleculeDictionary) AddParentMolregnoMoleculeHierarchies(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeHierarchy) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.ParentMolregno, o.Molregno) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_hierarchy\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"parent_molregno"}), + strmangle.WhereClause("\"", "\"", 0, moleculeHierarchyPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, rel.Molregno} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.ParentMolregno, o.Molregno) + } + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + ParentMolregnoMoleculeHierarchies: related, + } + } else { + o.R.ParentMolregnoMoleculeHierarchies = append(o.R.ParentMolregnoMoleculeHierarchies, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &moleculeHierarchyR{ + ParentMolregnoMoleculeDictionary: o, + } + } else { + rel.R.ParentMolregnoMoleculeDictionary = o + } + } + return nil +} + +// SetParentMolregnoMoleculeHierarchies removes all previously related items of the +// molecule_dictionary replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.ParentMolregnoMoleculeDictionary's ParentMolregnoMoleculeHierarchies accordingly. +// Replaces o.R.ParentMolregnoMoleculeHierarchies with related. +// Sets related.R.ParentMolregnoMoleculeDictionary's ParentMolregnoMoleculeHierarchies accordingly. 
+func (o *MoleculeDictionary) SetParentMolregnoMoleculeHierarchies(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeHierarchy) error { + query := "update \"molecule_hierarchy\" set \"parent_molregno\" = null where \"parent_molregno\" = ?" + values := []interface{}{o.Molregno} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.ParentMolregnoMoleculeHierarchies { + queries.SetScanner(&rel.ParentMolregno, nil) + if rel.R == nil { + continue + } + + rel.R.ParentMolregnoMoleculeDictionary = nil + } + o.R.ParentMolregnoMoleculeHierarchies = nil + } + + return o.AddParentMolregnoMoleculeHierarchies(ctx, exec, insert, related...) +} + +// RemoveParentMolregnoMoleculeHierarchies relationships from objects passed in. +// Removes related items from R.ParentMolregnoMoleculeHierarchies (uses pointer comparison, removal does not keep order) +// Sets related.R.ParentMolregnoMoleculeDictionary. 
+func (o *MoleculeDictionary) RemoveParentMolregnoMoleculeHierarchies(ctx context.Context, exec boil.ContextExecutor, related ...*MoleculeHierarchy) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.ParentMolregno, nil) + if rel.R != nil { + rel.R.ParentMolregnoMoleculeDictionary = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("parent_molregno")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.ParentMolregnoMoleculeHierarchies { + if rel != ri { + continue + } + + ln := len(o.R.ParentMolregnoMoleculeHierarchies) + if ln > 1 && i < ln-1 { + o.R.ParentMolregnoMoleculeHierarchies[i] = o.R.ParentMolregnoMoleculeHierarchies[ln-1] + } + o.R.ParentMolregnoMoleculeHierarchies = o.R.ParentMolregnoMoleculeHierarchies[:ln-1] + break + } + } + + return nil +} + +// AddActiveMolregnoMoleculeHierarchies adds the given related objects to the existing relationships +// of the molecule_dictionary, optionally inserting them as new records. +// Appends related to o.R.ActiveMolregnoMoleculeHierarchies. +// Sets related.R.ActiveMolregnoMoleculeDictionary appropriately. 
+func (o *MoleculeDictionary) AddActiveMolregnoMoleculeHierarchies(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeHierarchy) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.ActiveMolregno, o.Molregno) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_hierarchy\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"active_molregno"}), + strmangle.WhereClause("\"", "\"", 0, moleculeHierarchyPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, rel.Molregno} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.ActiveMolregno, o.Molregno) + } + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + ActiveMolregnoMoleculeHierarchies: related, + } + } else { + o.R.ActiveMolregnoMoleculeHierarchies = append(o.R.ActiveMolregnoMoleculeHierarchies, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &moleculeHierarchyR{ + ActiveMolregnoMoleculeDictionary: o, + } + } else { + rel.R.ActiveMolregnoMoleculeDictionary = o + } + } + return nil +} + +// SetActiveMolregnoMoleculeHierarchies removes all previously related items of the +// molecule_dictionary replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.ActiveMolregnoMoleculeDictionary's ActiveMolregnoMoleculeHierarchies accordingly. +// Replaces o.R.ActiveMolregnoMoleculeHierarchies with related. +// Sets related.R.ActiveMolregnoMoleculeDictionary's ActiveMolregnoMoleculeHierarchies accordingly. 
+func (o *MoleculeDictionary) SetActiveMolregnoMoleculeHierarchies(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeHierarchy) error { + query := "update \"molecule_hierarchy\" set \"active_molregno\" = null where \"active_molregno\" = ?" + values := []interface{}{o.Molregno} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.ActiveMolregnoMoleculeHierarchies { + queries.SetScanner(&rel.ActiveMolregno, nil) + if rel.R == nil { + continue + } + + rel.R.ActiveMolregnoMoleculeDictionary = nil + } + o.R.ActiveMolregnoMoleculeHierarchies = nil + } + + return o.AddActiveMolregnoMoleculeHierarchies(ctx, exec, insert, related...) +} + +// RemoveActiveMolregnoMoleculeHierarchies relationships from objects passed in. +// Removes related items from R.ActiveMolregnoMoleculeHierarchies (uses pointer comparison, removal does not keep order) +// Sets related.R.ActiveMolregnoMoleculeDictionary. 
+func (o *MoleculeDictionary) RemoveActiveMolregnoMoleculeHierarchies(ctx context.Context, exec boil.ContextExecutor, related ...*MoleculeHierarchy) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.ActiveMolregno, nil) + if rel.R != nil { + rel.R.ActiveMolregnoMoleculeDictionary = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("active_molregno")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.ActiveMolregnoMoleculeHierarchies { + if rel != ri { + continue + } + + ln := len(o.R.ActiveMolregnoMoleculeHierarchies) + if ln > 1 && i < ln-1 { + o.R.ActiveMolregnoMoleculeHierarchies[i] = o.R.ActiveMolregnoMoleculeHierarchies[ln-1] + } + o.R.ActiveMolregnoMoleculeHierarchies = o.R.ActiveMolregnoMoleculeHierarchies[:ln-1] + break + } + } + + return nil +} + +// AddMolregnoMoleculeHracClassifications adds the given related objects to the existing relationships +// of the molecule_dictionary, optionally inserting them as new records. +// Appends related to o.R.MolregnoMoleculeHracClassifications. +// Sets related.R.MolregnoMoleculeDictionary appropriately. 
+func (o *MoleculeDictionary) AddMolregnoMoleculeHracClassifications(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeHracClassification) error { + var err error + for _, rel := range related { + if insert { + rel.Molregno = o.Molregno + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_hrac_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, moleculeHracClassificationPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, rel.MolHracID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.Molregno = o.Molregno + } + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoMoleculeHracClassifications: related, + } + } else { + o.R.MolregnoMoleculeHracClassifications = append(o.R.MolregnoMoleculeHracClassifications, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &moleculeHracClassificationR{ + MolregnoMoleculeDictionary: o, + } + } else { + rel.R.MolregnoMoleculeDictionary = o + } + } + return nil +} + +// AddMolregnoMoleculeIracClassifications adds the given related objects to the existing relationships +// of the molecule_dictionary, optionally inserting them as new records. +// Appends related to o.R.MolregnoMoleculeIracClassifications. +// Sets related.R.MolregnoMoleculeDictionary appropriately. 
+func (o *MoleculeDictionary) AddMolregnoMoleculeIracClassifications(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeIracClassification) error { + var err error + for _, rel := range related { + if insert { + rel.Molregno = o.Molregno + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_irac_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, moleculeIracClassificationPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, rel.MolIracID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.Molregno = o.Molregno + } + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoMoleculeIracClassifications: related, + } + } else { + o.R.MolregnoMoleculeIracClassifications = append(o.R.MolregnoMoleculeIracClassifications, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &moleculeIracClassificationR{ + MolregnoMoleculeDictionary: o, + } + } else { + rel.R.MolregnoMoleculeDictionary = o + } + } + return nil +} + +// AddMolregnoMoleculeSynonyms adds the given related objects to the existing relationships +// of the molecule_dictionary, optionally inserting them as new records. +// Appends related to o.R.MolregnoMoleculeSynonyms. +// Sets related.R.MolregnoMoleculeDictionary appropriately. 
+func (o *MoleculeDictionary) AddMolregnoMoleculeSynonyms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeSynonym) error { + var err error + for _, rel := range related { + if insert { + rel.Molregno = o.Molregno + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_synonyms\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, moleculeSynonymPrimaryKeyColumns), + ) + values := []interface{}{o.Molregno, rel.MolsynID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.Molregno = o.Molregno + } + } + + if o.R == nil { + o.R = &moleculeDictionaryR{ + MolregnoMoleculeSynonyms: related, + } + } else { + o.R.MolregnoMoleculeSynonyms = append(o.R.MolregnoMoleculeSynonyms, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &moleculeSynonymR{ + MolregnoMoleculeDictionary: o, + } + } else { + rel.R.MolregnoMoleculeDictionary = o + } + } + return nil +} + +// MoleculeDictionaries retrieves all the records using an executor. +func MoleculeDictionaries(mods ...qm.QueryMod) moleculeDictionaryQuery { + mods = append(mods, qm.From("\"molecule_dictionary\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"molecule_dictionary\".*"}) + } + + return moleculeDictionaryQuery{q} +} + +// FindMoleculeDictionary retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, molregno int64, selectCols ...string) (*MoleculeDictionary, error) { + moleculeDictionaryObj := &MoleculeDictionary{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"molecule_dictionary\" where \"molregno\"=?", sel, + ) + + q := queries.Raw(query, molregno) + + err := q.Bind(ctx, exec, moleculeDictionaryObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from molecule_dictionary") + } + + if err = moleculeDictionaryObj.doAfterSelectHooks(ctx, exec); err != nil { + return moleculeDictionaryObj, err + } + + return moleculeDictionaryObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *MoleculeDictionary) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no molecule_dictionary provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(moleculeDictionaryColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + moleculeDictionaryInsertCacheMut.RLock() + cache, cached := moleculeDictionaryInsertCache[key] + moleculeDictionaryInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + moleculeDictionaryAllColumns, + moleculeDictionaryColumnsWithDefault, + moleculeDictionaryColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(moleculeDictionaryType, moleculeDictionaryMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(moleculeDictionaryType, 
moleculeDictionaryMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"molecule_dictionary\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"molecule_dictionary\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into molecule_dictionary") + } + + if !cached { + moleculeDictionaryInsertCacheMut.Lock() + moleculeDictionaryInsertCache[key] = cache + moleculeDictionaryInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the MoleculeDictionary. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *MoleculeDictionary) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + moleculeDictionaryUpdateCacheMut.RLock() + cache, cached := moleculeDictionaryUpdateCache[key] + moleculeDictionaryUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + moleculeDictionaryAllColumns, + moleculeDictionaryPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update molecule_dictionary, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"molecule_dictionary\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, moleculeDictionaryPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(moleculeDictionaryType, moleculeDictionaryMapping, append(wl, moleculeDictionaryPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update molecule_dictionary row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for molecule_dictionary") + } + + if !cached { + moleculeDictionaryUpdateCacheMut.Lock() + moleculeDictionaryUpdateCache[key] = cache + moleculeDictionaryUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q moleculeDictionaryQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for molecule_dictionary") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for molecule_dictionary") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o MoleculeDictionarySlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeDictionaryPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"molecule_dictionary\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeDictionaryPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in moleculeDictionary slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all moleculeDictionary") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *MoleculeDictionary) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no molecule_dictionary provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(moleculeDictionaryColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := 
buf.String() + strmangle.PutBuffer(buf) + + moleculeDictionaryUpsertCacheMut.RLock() + cache, cached := moleculeDictionaryUpsertCache[key] + moleculeDictionaryUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + moleculeDictionaryAllColumns, + moleculeDictionaryColumnsWithDefault, + moleculeDictionaryColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + moleculeDictionaryAllColumns, + moleculeDictionaryPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert molecule_dictionary, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(moleculeDictionaryPrimaryKeyColumns)) + copy(conflict, moleculeDictionaryPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"molecule_dictionary\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(moleculeDictionaryType, moleculeDictionaryMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(moleculeDictionaryType, moleculeDictionaryMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert molecule_dictionary") + } + + if !cached { + moleculeDictionaryUpsertCacheMut.Lock() + moleculeDictionaryUpsertCache[key] = cache + moleculeDictionaryUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single MoleculeDictionary record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *MoleculeDictionary) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no MoleculeDictionary provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), moleculeDictionaryPrimaryKeyMapping) + sql := "DELETE FROM \"molecule_dictionary\" WHERE \"molregno\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from molecule_dictionary") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for molecule_dictionary") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q moleculeDictionaryQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no moleculeDictionaryQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from molecule_dictionary") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for molecule_dictionary") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o MoleculeDictionarySlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(moleculeDictionaryBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeDictionaryPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"molecule_dictionary\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeDictionaryPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from moleculeDictionary slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for molecule_dictionary") + } + + if len(moleculeDictionaryAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *MoleculeDictionary) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindMoleculeDictionary(ctx, exec, o.Molregno) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *MoleculeDictionarySlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := MoleculeDictionarySlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeDictionaryPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"molecule_dictionary\".* FROM \"molecule_dictionary\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeDictionaryPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in MoleculeDictionarySlice") + } + + *o = slice + + return nil +} + +// MoleculeDictionaryExists checks if the MoleculeDictionary row exists. 
+func MoleculeDictionaryExists(ctx context.Context, exec boil.ContextExecutor, molregno int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"molecule_dictionary\" where \"molregno\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, molregno) + } + row := exec.QueryRowContext(ctx, sql, molregno) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if molecule_dictionary exists") + } + + return exists, nil +} diff --git a/models/molecule_frac_classification.go b/models/molecule_frac_classification.go new file mode 100644 index 0000000..afb6e93 --- /dev/null +++ b/models/molecule_frac_classification.go @@ -0,0 +1,1241 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// MoleculeFracClassification is an object representing the database table. 
+type MoleculeFracClassification struct { + MolFracID int64 `boil:"mol_frac_id" json:"mol_frac_id" toml:"mol_frac_id" yaml:"mol_frac_id"` + FracClassID int64 `boil:"frac_class_id" json:"frac_class_id" toml:"frac_class_id" yaml:"frac_class_id"` + Molregno int64 `boil:"molregno" json:"molregno" toml:"molregno" yaml:"molregno"` + + R *moleculeFracClassificationR `boil:"-" json:"-" toml:"-" yaml:"-"` + L moleculeFracClassificationL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var MoleculeFracClassificationColumns = struct { + MolFracID string + FracClassID string + Molregno string +}{ + MolFracID: "mol_frac_id", + FracClassID: "frac_class_id", + Molregno: "molregno", +} + +var MoleculeFracClassificationTableColumns = struct { + MolFracID string + FracClassID string + Molregno string +}{ + MolFracID: "molecule_frac_classification.mol_frac_id", + FracClassID: "molecule_frac_classification.frac_class_id", + Molregno: "molecule_frac_classification.molregno", +} + +// Generated where + +var MoleculeFracClassificationWhere = struct { + MolFracID whereHelperint64 + FracClassID whereHelperint64 + Molregno whereHelperint64 +}{ + MolFracID: whereHelperint64{field: "\"molecule_frac_classification\".\"mol_frac_id\""}, + FracClassID: whereHelperint64{field: "\"molecule_frac_classification\".\"frac_class_id\""}, + Molregno: whereHelperint64{field: "\"molecule_frac_classification\".\"molregno\""}, +} + +// MoleculeFracClassificationRels is where relationship names are stored. +var MoleculeFracClassificationRels = struct { + MolregnoMoleculeDictionary string + FracClass string +}{ + MolregnoMoleculeDictionary: "MolregnoMoleculeDictionary", + FracClass: "FracClass", +} + +// moleculeFracClassificationR is where relationships are stored. 
+type moleculeFracClassificationR struct { + MolregnoMoleculeDictionary *MoleculeDictionary `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"` + FracClass *FracClassification `boil:"FracClass" json:"FracClass" toml:"FracClass" yaml:"FracClass"` +} + +// NewStruct creates a new relationship struct +func (*moleculeFracClassificationR) NewStruct() *moleculeFracClassificationR { + return &moleculeFracClassificationR{} +} + +func (r *moleculeFracClassificationR) GetMolregnoMoleculeDictionary() *MoleculeDictionary { + if r == nil { + return nil + } + return r.MolregnoMoleculeDictionary +} + +func (r *moleculeFracClassificationR) GetFracClass() *FracClassification { + if r == nil { + return nil + } + return r.FracClass +} + +// moleculeFracClassificationL is where Load methods for each relationship are stored. +type moleculeFracClassificationL struct{} + +var ( + moleculeFracClassificationAllColumns = []string{"mol_frac_id", "frac_class_id", "molregno"} + moleculeFracClassificationColumnsWithoutDefault = []string{"mol_frac_id", "frac_class_id", "molregno"} + moleculeFracClassificationColumnsWithDefault = []string{} + moleculeFracClassificationPrimaryKeyColumns = []string{"mol_frac_id"} + moleculeFracClassificationGeneratedColumns = []string{} +) + +type ( + // MoleculeFracClassificationSlice is an alias for a slice of pointers to MoleculeFracClassification. + // This should almost always be used instead of []MoleculeFracClassification. 
+ MoleculeFracClassificationSlice []*MoleculeFracClassification + // MoleculeFracClassificationHook is the signature for custom MoleculeFracClassification hook methods + MoleculeFracClassificationHook func(context.Context, boil.ContextExecutor, *MoleculeFracClassification) error + + moleculeFracClassificationQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + moleculeFracClassificationType = reflect.TypeOf(&MoleculeFracClassification{}) + moleculeFracClassificationMapping = queries.MakeStructMapping(moleculeFracClassificationType) + moleculeFracClassificationPrimaryKeyMapping, _ = queries.BindMapping(moleculeFracClassificationType, moleculeFracClassificationMapping, moleculeFracClassificationPrimaryKeyColumns) + moleculeFracClassificationInsertCacheMut sync.RWMutex + moleculeFracClassificationInsertCache = make(map[string]insertCache) + moleculeFracClassificationUpdateCacheMut sync.RWMutex + moleculeFracClassificationUpdateCache = make(map[string]updateCache) + moleculeFracClassificationUpsertCacheMut sync.RWMutex + moleculeFracClassificationUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var moleculeFracClassificationAfterSelectHooks []MoleculeFracClassificationHook + +var moleculeFracClassificationBeforeInsertHooks []MoleculeFracClassificationHook +var moleculeFracClassificationAfterInsertHooks []MoleculeFracClassificationHook + +var moleculeFracClassificationBeforeUpdateHooks []MoleculeFracClassificationHook +var moleculeFracClassificationAfterUpdateHooks []MoleculeFracClassificationHook + +var moleculeFracClassificationBeforeDeleteHooks []MoleculeFracClassificationHook +var moleculeFracClassificationAfterDeleteHooks []MoleculeFracClassificationHook + +var moleculeFracClassificationBeforeUpsertHooks []MoleculeFracClassificationHook +var moleculeFracClassificationAfterUpsertHooks []MoleculeFracClassificationHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *MoleculeFracClassification) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeFracClassificationAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *MoleculeFracClassification) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeFracClassificationBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. 
+func (o *MoleculeFracClassification) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeFracClassificationAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *MoleculeFracClassification) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeFracClassificationBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *MoleculeFracClassification) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeFracClassificationAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *MoleculeFracClassification) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeFracClassificationBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *MoleculeFracClassification) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeFracClassificationAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. 
+func (o *MoleculeFracClassification) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeFracClassificationBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *MoleculeFracClassification) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeFracClassificationAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddMoleculeFracClassificationHook registers your hook function for all future operations. +func AddMoleculeFracClassificationHook(hookPoint boil.HookPoint, moleculeFracClassificationHook MoleculeFracClassificationHook) { + switch hookPoint { + case boil.AfterSelectHook: + moleculeFracClassificationAfterSelectHooks = append(moleculeFracClassificationAfterSelectHooks, moleculeFracClassificationHook) + case boil.BeforeInsertHook: + moleculeFracClassificationBeforeInsertHooks = append(moleculeFracClassificationBeforeInsertHooks, moleculeFracClassificationHook) + case boil.AfterInsertHook: + moleculeFracClassificationAfterInsertHooks = append(moleculeFracClassificationAfterInsertHooks, moleculeFracClassificationHook) + case boil.BeforeUpdateHook: + moleculeFracClassificationBeforeUpdateHooks = append(moleculeFracClassificationBeforeUpdateHooks, moleculeFracClassificationHook) + case boil.AfterUpdateHook: + moleculeFracClassificationAfterUpdateHooks = append(moleculeFracClassificationAfterUpdateHooks, moleculeFracClassificationHook) + case boil.BeforeDeleteHook: + moleculeFracClassificationBeforeDeleteHooks = append(moleculeFracClassificationBeforeDeleteHooks, moleculeFracClassificationHook) + case boil.AfterDeleteHook: + moleculeFracClassificationAfterDeleteHooks = 
append(moleculeFracClassificationAfterDeleteHooks, moleculeFracClassificationHook) + case boil.BeforeUpsertHook: + moleculeFracClassificationBeforeUpsertHooks = append(moleculeFracClassificationBeforeUpsertHooks, moleculeFracClassificationHook) + case boil.AfterUpsertHook: + moleculeFracClassificationAfterUpsertHooks = append(moleculeFracClassificationAfterUpsertHooks, moleculeFracClassificationHook) + } +} + +// One returns a single moleculeFracClassification record from the query. +func (q moleculeFracClassificationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*MoleculeFracClassification, error) { + o := &MoleculeFracClassification{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for molecule_frac_classification") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all MoleculeFracClassification records from the query. +func (q moleculeFracClassificationQuery) All(ctx context.Context, exec boil.ContextExecutor) (MoleculeFracClassificationSlice, error) { + var o []*MoleculeFracClassification + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to MoleculeFracClassification slice") + } + + if len(moleculeFracClassificationAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all MoleculeFracClassification records in the query. 
+func (q moleculeFracClassificationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count molecule_frac_classification rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q moleculeFracClassificationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if molecule_frac_classification exists") + } + + return count > 0, nil +} + +// MolregnoMoleculeDictionary pointed to by the foreign key. +func (o *MoleculeFracClassification) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"molregno\" = ?", o.Molregno), + } + + queryMods = append(queryMods, mods...) + + return MoleculeDictionaries(queryMods...) +} + +// FracClass pointed to by the foreign key. +func (o *MoleculeFracClassification) FracClass(mods ...qm.QueryMod) fracClassificationQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"frac_class_id\" = ?", o.FracClassID), + } + + queryMods = append(queryMods, mods...) + + return FracClassifications(queryMods...) +} + +// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
func (moleculeFracClassificationL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeFracClassification interface{}, mods queries.Applicator) error {
	var slice []*MoleculeFracClassification
	var object *MoleculeFracClassification

	// The caller passes either a single object or a pointer to a slice,
	// signalled by the singular flag.
	if singular {
		object = maybeMoleculeFracClassification.(*MoleculeFracClassification)
	} else {
		slice = *maybeMoleculeFracClassification.(*[]*MoleculeFracClassification)
	}

	// Collect distinct molregno FK values, initializing each R-struct as we go.
	// NOTE(review): the dedupe via the inner range is O(n^2); fine for typical
	// eager-load batch sizes.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeFracClassificationR{}
		}
		args = append(args, object.Molregno)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeFracClassificationR{}
			}

			for _, a := range args {
				if a == obj.Molregno {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)

		}
	}

	// Nothing to look up.
	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_dictionary`),
		qm.WhereIn(`molecule_dictionary.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load MoleculeDictionary")
	}

	var resultSlice []*MoleculeDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary")
	}

	// NOTE(review): the guard checks this table's after-select registry, but the
	// hooks executed are the foreign MoleculeDictionary's — a known quirk of the
	// SQLBoiler generator, kept as-is.
	if len(moleculeFracClassificationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Wire up both sides of the relationship cache.
	if singular {
		foreign := resultSlice[0]
		object.R.MolregnoMoleculeDictionary = foreign
		if foreign.R == nil {
			foreign.R = &moleculeDictionaryR{}
		}
		foreign.R.MolregnoMoleculeFracClassifications = append(foreign.R.MolregnoMoleculeFracClassifications, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.Molregno == foreign.Molregno {
				local.R.MolregnoMoleculeDictionary = foreign
				if foreign.R == nil {
					foreign.R = &moleculeDictionaryR{}
				}
				foreign.R.MolregnoMoleculeFracClassifications = append(foreign.R.MolregnoMoleculeFracClassifications, local)
				break
			}
		}
	}

	return nil
}

// LoadFracClass allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (moleculeFracClassificationL) LoadFracClass(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeFracClassification interface{}, mods queries.Applicator) error {
	var slice []*MoleculeFracClassification
	var object *MoleculeFracClassification

	// Single object vs. slice, signalled by the singular flag.
	if singular {
		object = maybeMoleculeFracClassification.(*MoleculeFracClassification)
	} else {
		slice = *maybeMoleculeFracClassification.(*[]*MoleculeFracClassification)
	}

	// Collect distinct frac_class_id FK values, initializing R-structs.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeFracClassificationR{}
		}
		args = append(args, object.FracClassID)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeFracClassificationR{}
			}

			for _, a := range args {
				if a == obj.FracClassID {
					continue Outer
				}
			}

			args = append(args, obj.FracClassID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`frac_classification`),
		qm.WhereIn(`frac_classification.frac_class_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load FracClassification")
	}

	var resultSlice []*FracClassification
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice FracClassification")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for frac_classification")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for frac_classification")
	}

	// Same generator quirk as above: local registry guard, foreign hooks run.
	if len(moleculeFracClassificationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Wire up both sides of the relationship cache.
	if singular {
		foreign := resultSlice[0]
		object.R.FracClass = foreign
		if foreign.R == nil {
			foreign.R = &fracClassificationR{}
		}
		foreign.R.FracClassMoleculeFracClassifications = append(foreign.R.FracClassMoleculeFracClassifications, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.FracClassID == foreign.FracClassID {
				local.R.FracClass = foreign
				if foreign.R == nil {
					foreign.R = &fracClassificationR{}
				}
				foreign.R.FracClassMoleculeFracClassifications = append(foreign.R.FracClassMoleculeFracClassifications, local)
				break
			}
		}
	}

	return nil
}

// SetMolregnoMoleculeDictionary of the moleculeFracClassification to the related item.
// Sets o.R.MolregnoMoleculeDictionary to related.
// Adds o to related.R.MolregnoMoleculeFracClassifications.
func (o *MoleculeFracClassification) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error {
	var err error
	// Optionally insert the related row first so its key is available.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Point this row's molregno FK at the related row, matched by primary key.
	updateQuery := fmt.Sprintf(
		"UPDATE \"molecule_frac_classification\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}),
		strmangle.WhereClause("\"", "\"", 0, moleculeFracClassificationPrimaryKeyColumns),
	)
	values := []interface{}{related.Molregno, o.MolFracID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the DB change in memory only after the UPDATE succeeded.
	o.Molregno = related.Molregno
	if o.R == nil {
		o.R = &moleculeFracClassificationR{
			MolregnoMoleculeDictionary: related,
		}
	} else {
		o.R.MolregnoMoleculeDictionary = related
	}

	// Wire the reverse side of the relationship cache.
	if related.R == nil {
		related.R = &moleculeDictionaryR{
			MolregnoMoleculeFracClassifications: MoleculeFracClassificationSlice{o},
		}
	} else {
		related.R.MolregnoMoleculeFracClassifications = append(related.R.MolregnoMoleculeFracClassifications, o)
	}

	return nil
}

// SetFracClass of the moleculeFracClassification to the related item.
// Sets o.R.FracClass to related.
// Adds o to related.R.FracClassMoleculeFracClassifications.
func (o *MoleculeFracClassification) SetFracClass(ctx context.Context, exec boil.ContextExecutor, insert bool, related *FracClassification) error {
	var err error
	// Optionally insert the related row first so its key is available.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Point this row's frac_class_id FK at the related row, matched by primary key.
	updateQuery := fmt.Sprintf(
		"UPDATE \"molecule_frac_classification\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"frac_class_id"}),
		strmangle.WhereClause("\"", "\"", 0, moleculeFracClassificationPrimaryKeyColumns),
	)
	values := []interface{}{related.FracClassID, o.MolFracID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the DB change in memory only after the UPDATE succeeded.
	o.FracClassID = related.FracClassID
	if o.R == nil {
		o.R = &moleculeFracClassificationR{
			FracClass: related,
		}
	} else {
		o.R.FracClass = related
	}

	// Wire the reverse side of the relationship cache.
	if related.R == nil {
		related.R = &fracClassificationR{
			FracClassMoleculeFracClassifications: MoleculeFracClassificationSlice{o},
		}
	} else {
		related.R.FracClassMoleculeFracClassifications = append(related.R.FracClassMoleculeFracClassifications, o)
	}

	return nil
}

// MoleculeFracClassifications retrieves all the records using an executor.
func MoleculeFracClassifications(mods ...qm.QueryMod) moleculeFracClassificationQuery {
	mods = append(mods, qm.From("\"molecule_frac_classification\""))
	q := NewQuery(mods...)
	// Default to selecting all columns unless the caller chose some.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"molecule_frac_classification\".*"})
	}

	return moleculeFracClassificationQuery{q}
}

// FindMoleculeFracClassification retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
+func FindMoleculeFracClassification(ctx context.Context, exec boil.ContextExecutor, molFracID int64, selectCols ...string) (*MoleculeFracClassification, error) { + moleculeFracClassificationObj := &MoleculeFracClassification{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"molecule_frac_classification\" where \"mol_frac_id\"=?", sel, + ) + + q := queries.Raw(query, molFracID) + + err := q.Bind(ctx, exec, moleculeFracClassificationObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from molecule_frac_classification") + } + + if err = moleculeFracClassificationObj.doAfterSelectHooks(ctx, exec); err != nil { + return moleculeFracClassificationObj, err + } + + return moleculeFracClassificationObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
func (o *MoleculeFracClassification) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no molecule_frac_classification provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	// Columns with DB defaults that hold non-zero values must be inserted
	// explicitly; they also participate in the cache key below.
	nzDefaults := queries.NonZeroDefaultSet(moleculeFracClassificationColumnsWithDefault, o)

	key := makeCacheKey(columns, nzDefaults)
	moleculeFracClassificationInsertCacheMut.RLock()
	cache, cached := moleculeFracClassificationInsertCache[key]
	moleculeFracClassificationInsertCacheMut.RUnlock()

	// On cache miss, build the statement and value mappings once.
	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			moleculeFracClassificationAllColumns,
			moleculeFracClassificationColumnsWithDefault,
			moleculeFracClassificationColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(moleculeFracClassificationType, moleculeFracClassificationMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(moleculeFracClassificationType, moleculeFracClassificationMapping, returnColumns)
		if err != nil {
			return err
		}
		// The %%s placeholders survive this Sprintf and are filled in by the
		// second Sprintf below (output/RETURNING clauses).
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"molecule_frac_classification\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"molecule_frac_classification\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	// When RETURNING columns exist, scan them back into the struct.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into molecule_frac_classification")
	}

	// Publish the built statement; a concurrent duplicate build is benign
	// (last writer wins with an identical value).
	if !cached {
		moleculeFracClassificationInsertCacheMut.Lock()
		moleculeFracClassificationInsertCache[key] = cache
		moleculeFracClassificationInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the MoleculeFracClassification.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *MoleculeFracClassification) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	moleculeFracClassificationUpdateCacheMut.RLock()
	cache, cached := moleculeFracClassificationUpdateCache[key]
	moleculeFracClassificationUpdateCacheMut.RUnlock()

	// On cache miss, build the UPDATE statement and value mapping once.
	if !cached {
		wl := columns.UpdateColumnSet(
			moleculeFracClassificationAllColumns,
			moleculeFracClassificationPrimaryKeyColumns,
		)

		// created_at is never updated unless explicitly whitelisted.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update molecule_frac_classification, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"molecule_frac_classification\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, moleculeFracClassificationPrimaryKeyColumns),
		)
		// Bind SET columns first, then the primary key for the WHERE clause,
		// matching the parameter order of the statement above.
		cache.valueMapping, err = queries.BindMapping(moleculeFracClassificationType, moleculeFracClassificationMapping, append(wl, moleculeFracClassificationPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update molecule_frac_classification row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for molecule_frac_classification")
	}

	if !cached {
		moleculeFracClassificationUpdateCacheMut.Lock()
		moleculeFracClassificationUpdateCache[key] = cache
		moleculeFracClassificationUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q moleculeFracClassificationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	// Convert the stored SELECT into an UPDATE over the same predicates.
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for molecule_frac_classification")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for molecule_frac_classification")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o MoleculeFracClassificationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	// colNames and args are filled in the same (map-iteration) order, so the
	// SET clause's parameter positions match the argument positions even
	// though Go map iteration order is random.
	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeFracClassificationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"molecule_frac_classification\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeFracClassificationPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in moleculeFracClassification slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all moleculeFracClassification")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *MoleculeFracClassification) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no molecule_frac_classification provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(moleculeFracClassificationColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	// The key encodes every input that changes the generated SQL: conflict
	// behavior, conflict/update/insert column sets and non-zero defaults.
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	moleculeFracClassificationUpsertCacheMut.RLock()
	cache, cached := moleculeFracClassificationUpsertCache[key]
	moleculeFracClassificationUpsertCacheMut.RUnlock()

	var err error

	// On cache miss, build the upsert statement and mappings once.
	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			moleculeFracClassificationAllColumns,
			moleculeFracClassificationColumnsWithDefault,
			moleculeFracClassificationColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			moleculeFracClassificationAllColumns,
			moleculeFracClassificationPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert molecule_frac_classification, could not build update column list")
		}

		// Conflict target defaults to the primary key columns (copied so the
		// shared package-level slice is never mutated).
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(moleculeFracClassificationPrimaryKeyColumns))
			copy(conflict, moleculeFracClassificationPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"molecule_frac_classification\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(moleculeFracClassificationType, moleculeFracClassificationMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(moleculeFracClassificationType, moleculeFracClassificationMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert molecule_frac_classification")
	}

	// Publish the built statement; a concurrent duplicate build is benign.
	if !cached {
		moleculeFracClassificationUpsertCacheMut.Lock()
		moleculeFracClassificationUpsertCache[key] = cache
		moleculeFracClassificationUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single MoleculeFracClassification record with an executor.
// Delete will match against the primary key column to find the record to delete.
+func (o *MoleculeFracClassification) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no MoleculeFracClassification provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), moleculeFracClassificationPrimaryKeyMapping) + sql := "DELETE FROM \"molecule_frac_classification\" WHERE \"mol_frac_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from molecule_frac_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for molecule_frac_classification") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q moleculeFracClassificationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no moleculeFracClassificationQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from molecule_frac_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for molecule_frac_classification") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o MoleculeFracClassificationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(moleculeFracClassificationBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeFracClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"molecule_frac_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeFracClassificationPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from moleculeFracClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for molecule_frac_classification") + } + + if len(moleculeFracClassificationAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *MoleculeFracClassification) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindMoleculeFracClassification(ctx, exec, o.MolFracID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
func (o *MoleculeFracClassificationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := MoleculeFracClassificationSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeFracClassificationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"molecule_frac_classification\".* FROM \"molecule_frac_classification\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeFracClassificationPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in MoleculeFracClassificationSlice")
	}

	// NOTE(review): the reloaded slice may be shorter than the input if rows
	// were deleted concurrently, and its order follows the database result,
	// not the input slice.
	*o = slice

	return nil
}

// MoleculeFracClassificationExists checks if the MoleculeFracClassification row exists.
func MoleculeFracClassificationExists(ctx context.Context, exec boil.ContextExecutor, molFracID int64) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"molecule_frac_classification\" where \"mol_frac_id\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, molFracID)
	}
	row := exec.QueryRowContext(ctx, sql, molFracID)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if molecule_frac_classification exists")
	}

	return exists, nil
}
diff --git a/models/molecule_hierarchy.go b/models/molecule_hierarchy.go
new file mode 100644
index 0000000..41a1f1e
--- /dev/null
+++ b/models/molecule_hierarchy.go
@@ -0,0 +1,1488 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// MoleculeHierarchy is an object representing the database table.
// Molregno is the primary key; parent_molregno and active_molregno are
// nullable self-referencing links into molecule_dictionary.
type MoleculeHierarchy struct {
	Molregno       int64      `boil:"molregno" json:"molregno" toml:"molregno" yaml:"molregno"`
	ParentMolregno null.Int64 `boil:"parent_molregno" json:"parent_molregno,omitempty" toml:"parent_molregno" yaml:"parent_molregno,omitempty"`
	ActiveMolregno null.Int64 `boil:"active_molregno" json:"active_molregno,omitempty" toml:"active_molregno" yaml:"active_molregno,omitempty"`

	// R holds eager-loaded relationships; L holds the Load methods.
	R *moleculeHierarchyR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L moleculeHierarchyL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// MoleculeHierarchyColumns maps struct fields to bare column names.
var MoleculeHierarchyColumns = struct {
	Molregno       string
	ParentMolregno string
	ActiveMolregno string
}{
	Molregno:       "molregno",
	ParentMolregno: "parent_molregno",
	ActiveMolregno: "active_molregno",
}

// MoleculeHierarchyTableColumns maps struct fields to table-qualified column names.
var MoleculeHierarchyTableColumns = struct {
	Molregno       string
	ParentMolregno string
	ActiveMolregno string
}{
	Molregno:       "molecule_hierarchy.molregno",
	ParentMolregno: "molecule_hierarchy.parent_molregno",
	ActiveMolregno: "molecule_hierarchy.active_molregno",
}

// Generated where

var MoleculeHierarchyWhere = struct {
	Molregno       whereHelperint64
	ParentMolregno whereHelpernull_Int64
	ActiveMolregno whereHelpernull_Int64
}{
	Molregno:       whereHelperint64{field: "\"molecule_hierarchy\".\"molregno\""},
	ParentMolregno: whereHelpernull_Int64{field: "\"molecule_hierarchy\".\"parent_molregno\""},
	ActiveMolregno: whereHelpernull_Int64{field: "\"molecule_hierarchy\".\"active_molregno\""},
}

// MoleculeHierarchyRels is where relationship names are stored.
var MoleculeHierarchyRels = struct {
	ParentMolregnoMoleculeDictionary string
	MolregnoMoleculeDictionary       string
	ActiveMolregnoMoleculeDictionary string
}{
	ParentMolregnoMoleculeDictionary: "ParentMolregnoMoleculeDictionary",
	MolregnoMoleculeDictionary:       "MolregnoMoleculeDictionary",
	ActiveMolregnoMoleculeDictionary: "ActiveMolregnoMoleculeDictionary",
}

// moleculeHierarchyR is where relationships are stored.
type moleculeHierarchyR struct {
	ParentMolregnoMoleculeDictionary *MoleculeDictionary `boil:"ParentMolregnoMoleculeDictionary" json:"ParentMolregnoMoleculeDictionary" toml:"ParentMolregnoMoleculeDictionary" yaml:"ParentMolregnoMoleculeDictionary"`
	MolregnoMoleculeDictionary       *MoleculeDictionary `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"`
	ActiveMolregnoMoleculeDictionary *MoleculeDictionary `boil:"ActiveMolregnoMoleculeDictionary" json:"ActiveMolregnoMoleculeDictionary" toml:"ActiveMolregnoMoleculeDictionary" yaml:"ActiveMolregnoMoleculeDictionary"`
}

// NewStruct creates a new relationship struct
func (*moleculeHierarchyR) NewStruct() *moleculeHierarchyR {
	return &moleculeHierarchyR{}
}

// GetParentMolregnoMoleculeDictionary is a nil-safe accessor for the eager-loaded relation.
func (r *moleculeHierarchyR) GetParentMolregnoMoleculeDictionary() *MoleculeDictionary {
	if r == nil {
		return nil
	}
	return r.ParentMolregnoMoleculeDictionary
}

// GetMolregnoMoleculeDictionary is a nil-safe accessor for the eager-loaded relation.
func (r *moleculeHierarchyR) GetMolregnoMoleculeDictionary() *MoleculeDictionary {
	if r == nil {
		return nil
	}
	return r.MolregnoMoleculeDictionary
}

// GetActiveMolregnoMoleculeDictionary is a nil-safe accessor for the eager-loaded relation.
func (r *moleculeHierarchyR) GetActiveMolregnoMoleculeDictionary() *MoleculeDictionary {
	if r == nil {
		return nil
	}
	return r.ActiveMolregnoMoleculeDictionary
}

// moleculeHierarchyL is where Load methods for each relationship are stored.
type moleculeHierarchyL struct{}

var (
	moleculeHierarchyAllColumns            = []string{"molregno", "parent_molregno", "active_molregno"}
	moleculeHierarchyColumnsWithoutDefault = []string{"molregno"}
	moleculeHierarchyColumnsWithDefault    = []string{"parent_molregno", "active_molregno"}
	moleculeHierarchyPrimaryKeyColumns     = []string{"molregno"}
	moleculeHierarchyGeneratedColumns      = []string{}
)

type (
	// MoleculeHierarchySlice is an alias for a slice of pointers to MoleculeHierarchy.
	// This should almost always be used instead of []MoleculeHierarchy.
	MoleculeHierarchySlice []*MoleculeHierarchy
	// MoleculeHierarchyHook is the signature for custom MoleculeHierarchy hook methods
	MoleculeHierarchyHook func(context.Context, boil.ContextExecutor, *MoleculeHierarchy) error

	moleculeHierarchyQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
// (keyed by column set; guarded by the paired RWMutex below).
var (
	moleculeHierarchyType                 = reflect.TypeOf(&MoleculeHierarchy{})
	moleculeHierarchyMapping              = queries.MakeStructMapping(moleculeHierarchyType)
	moleculeHierarchyPrimaryKeyMapping, _ = queries.BindMapping(moleculeHierarchyType, moleculeHierarchyMapping, moleculeHierarchyPrimaryKeyColumns)
	moleculeHierarchyInsertCacheMut       sync.RWMutex
	moleculeHierarchyInsertCache          = make(map[string]insertCache)
	moleculeHierarchyUpdateCacheMut       sync.RWMutex
	moleculeHierarchyUpdateCache          = make(map[string]updateCache)
	moleculeHierarchyUpsertCacheMut       sync.RWMutex
	moleculeHierarchyUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Package-level hook registries, appended to by AddMoleculeHierarchyHook.
var moleculeHierarchyAfterSelectHooks []MoleculeHierarchyHook

var moleculeHierarchyBeforeInsertHooks []MoleculeHierarchyHook
var moleculeHierarchyAfterInsertHooks []MoleculeHierarchyHook

var moleculeHierarchyBeforeUpdateHooks []MoleculeHierarchyHook
var moleculeHierarchyAfterUpdateHooks []MoleculeHierarchyHook

var moleculeHierarchyBeforeDeleteHooks []MoleculeHierarchyHook
var moleculeHierarchyAfterDeleteHooks []MoleculeHierarchyHook

var moleculeHierarchyBeforeUpsertHooks []MoleculeHierarchyHook
var moleculeHierarchyAfterUpsertHooks []MoleculeHierarchyHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *MoleculeHierarchy) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be disabled per-call via boil.SkipHooks on the context.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHierarchyAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *MoleculeHierarchy) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHierarchyBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *MoleculeHierarchy) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHierarchyAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *MoleculeHierarchy) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHierarchyBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *MoleculeHierarchy) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHierarchyAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *MoleculeHierarchy) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHierarchyBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *MoleculeHierarchy) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHierarchyAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *MoleculeHierarchy) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHierarchyBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *MoleculeHierarchy) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHierarchyAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddMoleculeHierarchyHook registers your hook function for all future operations.
// NOTE: registration appends to package-level slices and is not synchronized;
// register hooks during startup, before concurrent use.
func AddMoleculeHierarchyHook(hookPoint boil.HookPoint, moleculeHierarchyHook MoleculeHierarchyHook) {
	switch hookPoint {
	case boil.AfterSelectHook:
		moleculeHierarchyAfterSelectHooks = append(moleculeHierarchyAfterSelectHooks, moleculeHierarchyHook)
	case boil.BeforeInsertHook:
		moleculeHierarchyBeforeInsertHooks = append(moleculeHierarchyBeforeInsertHooks, moleculeHierarchyHook)
	case boil.AfterInsertHook:
		moleculeHierarchyAfterInsertHooks = append(moleculeHierarchyAfterInsertHooks, moleculeHierarchyHook)
	case boil.BeforeUpdateHook:
		moleculeHierarchyBeforeUpdateHooks = append(moleculeHierarchyBeforeUpdateHooks, moleculeHierarchyHook)
	case boil.AfterUpdateHook:
		moleculeHierarchyAfterUpdateHooks = append(moleculeHierarchyAfterUpdateHooks, moleculeHierarchyHook)
	case boil.BeforeDeleteHook:
		moleculeHierarchyBeforeDeleteHooks = append(moleculeHierarchyBeforeDeleteHooks, moleculeHierarchyHook)
	case boil.AfterDeleteHook:
		moleculeHierarchyAfterDeleteHooks = append(moleculeHierarchyAfterDeleteHooks, moleculeHierarchyHook)
	case boil.BeforeUpsertHook:
		moleculeHierarchyBeforeUpsertHooks = append(moleculeHierarchyBeforeUpsertHooks, moleculeHierarchyHook)
	case boil.AfterUpsertHook:
		moleculeHierarchyAfterUpsertHooks = append(moleculeHierarchyAfterUpsertHooks, moleculeHierarchyHook)
	}
}

// One returns a single moleculeHierarchy record from the query.
func (q moleculeHierarchyQuery) One(ctx context.Context, exec boil.ContextExecutor) (*MoleculeHierarchy, error) {
	o := &MoleculeHierarchy{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		// Preserve sql.ErrNoRows unwrapped so callers can errors.Is on it.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for molecule_hierarchy")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all MoleculeHierarchy records from the query.
func (q moleculeHierarchyQuery) All(ctx context.Context, exec boil.ContextExecutor) (MoleculeHierarchySlice, error) {
	var o []*MoleculeHierarchy

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to MoleculeHierarchy slice")
	}

	if len(moleculeHierarchyAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all MoleculeHierarchy records in the query.
func (q moleculeHierarchyQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Replace the select list with COUNT(*) before executing.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count molecule_hierarchy rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q moleculeHierarchyQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	// COUNT with LIMIT 1: the database can stop at the first matching row.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if molecule_hierarchy exists")
	}

	return count > 0, nil
}

// ParentMolregnoMoleculeDictionary pointed to by the foreign key.
func (o *MoleculeHierarchy) ParentMolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"molregno\" = ?", o.ParentMolregno),
	}

	queryMods = append(queryMods, mods...)

	return MoleculeDictionaries(queryMods...)
}

// MolregnoMoleculeDictionary pointed to by the foreign key.
func (o *MoleculeHierarchy) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"molregno\" = ?", o.Molregno),
	}

	queryMods = append(queryMods, mods...)

	return MoleculeDictionaries(queryMods...)
}

// ActiveMolregnoMoleculeDictionary pointed to by the foreign key.
func (o *MoleculeHierarchy) ActiveMolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"molregno\" = ?", o.ActiveMolregno),
	}

	queryMods = append(queryMods, mods...)

	return MoleculeDictionaries(queryMods...)
}

// LoadParentMolregnoMoleculeDictionary allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (moleculeHierarchyL) LoadParentMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeHierarchy interface{}, mods queries.Applicator) error {
	var slice []*MoleculeHierarchy
	var object *MoleculeHierarchy

	// The loader is invoked either for a single object or for a slice.
	if singular {
		object = maybeMoleculeHierarchy.(*MoleculeHierarchy)
	} else {
		slice = *maybeMoleculeHierarchy.(*[]*MoleculeHierarchy)
	}

	// Collect distinct, non-null parent_molregno values to query in one IN clause.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeHierarchyR{}
		}
		if !queries.IsNil(object.ParentMolregno) {
			args = append(args, object.ParentMolregno)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeHierarchyR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.ParentMolregno) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.ParentMolregno) {
				args = append(args, obj.ParentMolregno)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_dictionary`),
		qm.WhereIn(`molecule_dictionary.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load MoleculeDictionary")
	}

	var resultSlice []*MoleculeDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary")
	}

	if len(moleculeHierarchyAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Wire both sides of the relationship: local.R points at the foreign row,
	// and the foreign row's reverse slice gains the local object.
	if singular {
		foreign := resultSlice[0]
		object.R.ParentMolregnoMoleculeDictionary = foreign
		if foreign.R == nil {
			foreign.R = &moleculeDictionaryR{}
		}
		foreign.R.ParentMolregnoMoleculeHierarchies = append(foreign.R.ParentMolregnoMoleculeHierarchies, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.ParentMolregno, foreign.Molregno) {
				local.R.ParentMolregnoMoleculeDictionary = foreign
				if foreign.R == nil {
					foreign.R = &moleculeDictionaryR{}
				}
				foreign.R.ParentMolregnoMoleculeHierarchies = append(foreign.R.ParentMolregnoMoleculeHierarchies, local)
				break
			}
		}
	}

	return nil
}

// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (moleculeHierarchyL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeHierarchy interface{}, mods queries.Applicator) error {
	var slice []*MoleculeHierarchy
	var object *MoleculeHierarchy

	if singular {
		object = maybeMoleculeHierarchy.(*MoleculeHierarchy)
	} else {
		slice = *maybeMoleculeHierarchy.(*[]*MoleculeHierarchy)
	}

	// Molregno is the non-null primary key, so no IsNil filtering is needed here.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeHierarchyR{}
		}
		args = append(args, object.Molregno)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeHierarchyR{}
			}

			for _, a := range args {
				if a == obj.Molregno {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_dictionary`),
		qm.WhereIn(`molecule_dictionary.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load MoleculeDictionary")
	}

	var resultSlice []*MoleculeDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary")
	}

	if len(moleculeHierarchyAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.MolregnoMoleculeDictionary = foreign
		if foreign.R == nil {
			foreign.R = &moleculeDictionaryR{}
		}
		// One-to-one: the reverse side is a single pointer, not a slice.
		foreign.R.MolregnoMoleculeHierarchy = object
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.Molregno == foreign.Molregno {
				local.R.MolregnoMoleculeDictionary = foreign
				if foreign.R == nil {
					foreign.R = &moleculeDictionaryR{}
				}
				foreign.R.MolregnoMoleculeHierarchy = local
				break
			}
		}
	}

	return nil
}

// LoadActiveMolregnoMoleculeDictionary allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (moleculeHierarchyL) LoadActiveMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeHierarchy interface{}, mods queries.Applicator) error {
	var slice []*MoleculeHierarchy
	var object *MoleculeHierarchy

	if singular {
		object = maybeMoleculeHierarchy.(*MoleculeHierarchy)
	} else {
		slice = *maybeMoleculeHierarchy.(*[]*MoleculeHierarchy)
	}

	// Collect distinct, non-null active_molregno values for a single IN query.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeHierarchyR{}
		}
		if !queries.IsNil(object.ActiveMolregno) {
			args = append(args, object.ActiveMolregno)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeHierarchyR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.ActiveMolregno) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.ActiveMolregno) {
				args = append(args, obj.ActiveMolregno)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_dictionary`),
		qm.WhereIn(`molecule_dictionary.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load MoleculeDictionary")
	}

	var resultSlice []*MoleculeDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary")
	}

	if len(moleculeHierarchyAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.ActiveMolregnoMoleculeDictionary = foreign
		if foreign.R == nil {
			foreign.R = &moleculeDictionaryR{}
		}
		foreign.R.ActiveMolregnoMoleculeHierarchies = append(foreign.R.ActiveMolregnoMoleculeHierarchies, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.ActiveMolregno, foreign.Molregno) {
				local.R.ActiveMolregnoMoleculeDictionary = foreign
				if foreign.R == nil {
					foreign.R = &moleculeDictionaryR{}
				}
				foreign.R.ActiveMolregnoMoleculeHierarchies = append(foreign.R.ActiveMolregnoMoleculeHierarchies, local)
				break
			}
		}
	}

	return nil
}

// SetParentMolregnoMoleculeDictionary of the moleculeHierarchy to the related item.
// Sets o.R.ParentMolregnoMoleculeDictionary to related.
// Adds o to related.R.ParentMolregnoMoleculeHierarchies.
func (o *MoleculeHierarchy) SetParentMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error {
	var err error
	// Optionally insert the related row first so its primary key exists.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"molecule_hierarchy\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"parent_molregno"}),
		strmangle.WhereClause("\"", "\"", 0, moleculeHierarchyPrimaryKeyColumns),
	)
	values := []interface{}{related.Molregno, o.Molregno}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the database change in memory on both sides of the relation.
	queries.Assign(&o.ParentMolregno, related.Molregno)
	if o.R == nil {
		o.R = &moleculeHierarchyR{
			ParentMolregnoMoleculeDictionary: related,
		}
	} else {
		o.R.ParentMolregnoMoleculeDictionary = related
	}

	if related.R == nil {
		related.R = &moleculeDictionaryR{
			ParentMolregnoMoleculeHierarchies: MoleculeHierarchySlice{o},
		}
	} else {
		related.R.ParentMolregnoMoleculeHierarchies = append(related.R.ParentMolregnoMoleculeHierarchies, o)
	}

	return nil
}

// RemoveParentMolregnoMoleculeDictionary relationship.
// Sets o.R.ParentMolregnoMoleculeDictionary to nil.
// Removes o from all passed in related items' relationships struct.
func (o *MoleculeHierarchy) RemoveParentMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, related *MoleculeDictionary) error {
	var err error

	// Null out the FK column and persist just that column.
	queries.SetScanner(&o.ParentMolregno, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("parent_molregno")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.ParentMolregnoMoleculeDictionary = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Swap-remove o from the related side's slice (order is not preserved).
	for i, ri := range related.R.ParentMolregnoMoleculeHierarchies {
		if queries.Equal(o.ParentMolregno, ri.ParentMolregno) {
			continue
		}

		ln := len(related.R.ParentMolregnoMoleculeHierarchies)
		if ln > 1 && i < ln-1 {
			related.R.ParentMolregnoMoleculeHierarchies[i] = related.R.ParentMolregnoMoleculeHierarchies[ln-1]
		}
		related.R.ParentMolregnoMoleculeHierarchies = related.R.ParentMolregnoMoleculeHierarchies[:ln-1]
		break
	}
	return nil
}

// SetMolregnoMoleculeDictionary of the moleculeHierarchy to the related item.
// Sets o.R.MolregnoMoleculeDictionary to related.
// Adds o to related.R.MolregnoMoleculeHierarchy.
func (o *MoleculeHierarchy) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"molecule_hierarchy\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}),
		strmangle.WhereClause("\"", "\"", 0, moleculeHierarchyPrimaryKeyColumns),
	)
	values := []interface{}{related.Molregno, o.Molregno}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// molregno is non-null (and the primary key), so it is assigned directly
	// rather than via queries.Assign.
	o.Molregno = related.Molregno
	if o.R == nil {
		o.R = &moleculeHierarchyR{
			MolregnoMoleculeDictionary: related,
		}
	} else {
		o.R.MolregnoMoleculeDictionary = related
	}

	if related.R == nil {
		related.R = &moleculeDictionaryR{
			MolregnoMoleculeHierarchy: o,
		}
	} else {
		related.R.MolregnoMoleculeHierarchy = o
	}

	return nil
}

// SetActiveMolregnoMoleculeDictionary of the moleculeHierarchy to the related item.
// Sets o.R.ActiveMolregnoMoleculeDictionary to related.
// Adds o to related.R.ActiveMolregnoMoleculeHierarchies.
func (o *MoleculeHierarchy) SetActiveMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error {
	var err error
	// Optionally insert the related row first so its primary key exists.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"molecule_hierarchy\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"active_molregno"}),
		strmangle.WhereClause("\"", "\"", 0, moleculeHierarchyPrimaryKeyColumns),
	)
	values := []interface{}{related.Molregno, o.Molregno}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the database change in memory on both sides of the relation.
	queries.Assign(&o.ActiveMolregno, related.Molregno)
	if o.R == nil {
		o.R = &moleculeHierarchyR{
			ActiveMolregnoMoleculeDictionary: related,
		}
	} else {
		o.R.ActiveMolregnoMoleculeDictionary = related
	}

	if related.R == nil {
		related.R = &moleculeDictionaryR{
			ActiveMolregnoMoleculeHierarchies: MoleculeHierarchySlice{o},
		}
	} else {
		related.R.ActiveMolregnoMoleculeHierarchies = append(related.R.ActiveMolregnoMoleculeHierarchies, o)
	}

	return nil
}

// RemoveActiveMolregnoMoleculeDictionary relationship.
// Sets o.R.ActiveMolregnoMoleculeDictionary to nil.
// Removes o from all passed in related items' relationships struct.
func (o *MoleculeHierarchy) RemoveActiveMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, related *MoleculeDictionary) error {
	var err error

	// Null out the FK column and persist just that column.
	queries.SetScanner(&o.ActiveMolregno, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("active_molregno")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.ActiveMolregnoMoleculeDictionary = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Swap-remove o from the related side's slice (order is not preserved).
	for i, ri := range related.R.ActiveMolregnoMoleculeHierarchies {
		if queries.Equal(o.ActiveMolregno, ri.ActiveMolregno) {
			continue
		}

		ln := len(related.R.ActiveMolregnoMoleculeHierarchies)
		if ln > 1 && i < ln-1 {
			related.R.ActiveMolregnoMoleculeHierarchies[i] = related.R.ActiveMolregnoMoleculeHierarchies[ln-1]
		}
		related.R.ActiveMolregnoMoleculeHierarchies = related.R.ActiveMolregnoMoleculeHierarchies[:ln-1]
		break
	}
	return nil
}

// MoleculeHierarchies retrieves all the records using an executor.
func MoleculeHierarchies(mods ...qm.QueryMod) moleculeHierarchyQuery {
	mods = append(mods, qm.From("\"molecule_hierarchy\""))
	q := NewQuery(mods...)
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"molecule_hierarchy\".*"})
	}

	return moleculeHierarchyQuery{q}
}

// FindMoleculeHierarchy retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindMoleculeHierarchy(ctx context.Context, exec boil.ContextExecutor, molregno int64, selectCols ...string) (*MoleculeHierarchy, error) {
	moleculeHierarchyObj := &MoleculeHierarchy{}

	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"molecule_hierarchy\" where \"molregno\"=?", sel,
	)

	q := queries.Raw(query, molregno)

	err := q.Bind(ctx, exec, moleculeHierarchyObj)
	if err != nil {
		// Preserve sql.ErrNoRows unwrapped so callers can errors.Is on it.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from molecule_hierarchy")
	}

	if err = moleculeHierarchyObj.doAfterSelectHooks(ctx, exec); err != nil {
		return moleculeHierarchyObj, err
	}

	return moleculeHierarchyObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *MoleculeHierarchy) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no molecule_hierarchy provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(moleculeHierarchyColumnsWithDefault, o)

	// The built statement and its bind mappings are cached per column set,
	// so repeated inserts with the same shape skip query construction.
	key := makeCacheKey(columns, nzDefaults)
	moleculeHierarchyInsertCacheMut.RLock()
	cache, cached := moleculeHierarchyInsertCache[key]
	moleculeHierarchyInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			moleculeHierarchyAllColumns,
			moleculeHierarchyColumnsWithDefault,
			moleculeHierarchyColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(moleculeHierarchyType, moleculeHierarchyMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(moleculeHierarchyType, moleculeHierarchyMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"molecule_hierarchy\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"molecule_hierarchy\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		// RETURNING is only appended when default-valued columns must be read back.
		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into molecule_hierarchy")
	}

	if !cached {
		moleculeHierarchyInsertCacheMut.Lock()
		moleculeHierarchyInsertCache[key] = cache
		moleculeHierarchyInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the MoleculeHierarchy.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *MoleculeHierarchy) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	// Statement cache keyed on the column set (no defaults for updates).
	key := makeCacheKey(columns, nil)
	moleculeHierarchyUpdateCacheMut.RLock()
	cache, cached := moleculeHierarchyUpdateCache[key]
	moleculeHierarchyUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			moleculeHierarchyAllColumns,
			moleculeHierarchyPrimaryKeyColumns,
		)

		// created_at is never touched by an inferred update — only an explicit
		// whitelist may set it.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update molecule_hierarchy, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"molecule_hierarchy\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, moleculeHierarchyPrimaryKeyColumns),
		)
		// Values bind as: SET columns first, then the PK for the WHERE clause.
		cache.valueMapping, err = queries.BindMapping(moleculeHierarchyType, moleculeHierarchyMapping, append(wl, moleculeHierarchyPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update molecule_hierarchy row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for molecule_hierarchy")
	}

	if !cached {
		moleculeHierarchyUpdateCacheMut.Lock()
		moleculeHierarchyUpdateCache[key] = cache
		moleculeHierarchyUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q moleculeHierarchyQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for molecule_hierarchy")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for molecule_hierarchy")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o MoleculeHierarchySlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	// Map iteration order is random, but SET names and their values are filled
	// in lockstep so they stay paired.
	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeHierarchyPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// NOTE: local `sql` shadows the database/sql package inside this function.
	sql := fmt.Sprintf("UPDATE \"molecule_hierarchy\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeHierarchyPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in moleculeHierarchy slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all moleculeHierarchy")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *MoleculeHierarchy) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no molecule_hierarchy provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(moleculeHierarchyColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	// Key encodes conflict flag, conflict/update/insert column sets, and
	// non-zero defaults so each distinct shape gets its own cached statement.
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	moleculeHierarchyUpsertCacheMut.RLock()
	cache, cached := moleculeHierarchyUpsertCache[key]
	moleculeHierarchyUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			moleculeHierarchyAllColumns,
			moleculeHierarchyColumnsWithDefault,
			moleculeHierarchyColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			moleculeHierarchyAllColumns,
			moleculeHierarchyPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert molecule_hierarchy, could not build update column list")
		}

		// Conflict target defaults to the primary key when not supplied.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(moleculeHierarchyPrimaryKeyColumns))
			copy(conflict, moleculeHierarchyPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"molecule_hierarchy\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(moleculeHierarchyType, moleculeHierarchyMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(moleculeHierarchyType, moleculeHierarchyMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert molecule_hierarchy")
	}

	if !cached {
		moleculeHierarchyUpsertCacheMut.Lock()
		moleculeHierarchyUpsertCache[key] = cache
		moleculeHierarchyUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single MoleculeHierarchy record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *MoleculeHierarchy) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no MoleculeHierarchy provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), moleculeHierarchyPrimaryKeyMapping)
	// NOTE: local `sql` shadows the database/sql package inside this function.
	sql := "DELETE FROM \"molecule_hierarchy\" WHERE \"molregno\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from molecule_hierarchy")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for molecule_hierarchy")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q moleculeHierarchyQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no moleculeHierarchyQuery provided for delete all")
	}

	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from molecule_hierarchy")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for molecule_hierarchy")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o MoleculeHierarchySlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Hooks run per-object even though the delete itself is a single statement.
	if len(moleculeHierarchyBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeHierarchyPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"molecule_hierarchy\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeHierarchyPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from moleculeHierarchy slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for molecule_hierarchy")
	}

	if len(moleculeHierarchyAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *MoleculeHierarchy) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindMoleculeHierarchy(ctx, exec, o.Molregno)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place with the freshly-fetched row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *MoleculeHierarchySlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := MoleculeHierarchySlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeHierarchyPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"molecule_hierarchy\".* FROM \"molecule_hierarchy\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeHierarchyPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in MoleculeHierarchySlice")
	}

	// Replace the caller's slice wholesale; DB row order is not guaranteed to
	// match the original slice order.
	*o = slice

	return nil
}

// MoleculeHierarchyExists checks if the MoleculeHierarchy row exists.
func MoleculeHierarchyExists(ctx context.Context, exec boil.ContextExecutor, molregno int64) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"molecule_hierarchy\" where \"molregno\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, molregno)
	}
	row := exec.QueryRowContext(ctx, sql, molregno)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if molecule_hierarchy exists")
	}

	return exists, nil
}
diff --git a/models/molecule_hrac_classification.go b/models/molecule_hrac_classification.go
new file mode 100644
index 0000000..2334cd8
--- /dev/null
+++ b/models/molecule_hrac_classification.go
@@ -0,0 +1,1241 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// MoleculeHracClassification is an object representing the database table.
// It links a molecule (molregno) to an HRAC classification (hrac_class_id);
// mol_hrac_id is the primary key (see moleculeHracClassificationPrimaryKeyColumns).
type MoleculeHracClassification struct {
	MolHracID   int64 `boil:"mol_hrac_id" json:"mol_hrac_id" toml:"mol_hrac_id" yaml:"mol_hrac_id"`
	HracClassID int64 `boil:"hrac_class_id" json:"hrac_class_id" toml:"hrac_class_id" yaml:"hrac_class_id"`
	Molregno    int64 `boil:"molregno" json:"molregno" toml:"molregno" yaml:"molregno"`

	// R holds eager-loaded relationships; L holds the Load methods.
	R *moleculeHracClassificationR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L moleculeHracClassificationL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// MoleculeHracClassificationColumns maps struct fields to bare column names.
var MoleculeHracClassificationColumns = struct {
	MolHracID   string
	HracClassID string
	Molregno    string
}{
	MolHracID:   "mol_hrac_id",
	HracClassID: "hrac_class_id",
	Molregno:    "molregno",
}

// MoleculeHracClassificationTableColumns maps struct fields to table-qualified column names.
var MoleculeHracClassificationTableColumns = struct {
	MolHracID   string
	HracClassID string
	Molregno    string
}{
	MolHracID:   "molecule_hrac_classification.mol_hrac_id",
	HracClassID: "molecule_hrac_classification.hrac_class_id",
	Molregno:    "molecule_hrac_classification.molregno",
}

// Generated where

// MoleculeHracClassificationWhere provides type-safe where-clause helpers per column.
var MoleculeHracClassificationWhere = struct {
	MolHracID   whereHelperint64
	HracClassID whereHelperint64
	Molregno    whereHelperint64
}{
	MolHracID:   whereHelperint64{field: "\"molecule_hrac_classification\".\"mol_hrac_id\""},
	HracClassID: whereHelperint64{field: "\"molecule_hrac_classification\".\"hrac_class_id\""},
	Molregno:    whereHelperint64{field: "\"molecule_hrac_classification\".\"molregno\""},
}

// MoleculeHracClassificationRels is where relationship names are stored.
var MoleculeHracClassificationRels = struct {
	MolregnoMoleculeDictionary string
	HracClass                  string
}{
	MolregnoMoleculeDictionary: "MolregnoMoleculeDictionary",
	HracClass:                  "HracClass",
}

// moleculeHracClassificationR is where relationships are stored.
type moleculeHracClassificationR struct {
	MolregnoMoleculeDictionary *MoleculeDictionary `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"`
	HracClass                  *HracClassification `boil:"HracClass" json:"HracClass" toml:"HracClass" yaml:"HracClass"`
}

// NewStruct creates a new relationship struct
func (*moleculeHracClassificationR) NewStruct() *moleculeHracClassificationR {
	return &moleculeHracClassificationR{}
}

// GetMolregnoMoleculeDictionary returns the cached related MoleculeDictionary;
// nil-receiver safe.
func (r *moleculeHracClassificationR) GetMolregnoMoleculeDictionary() *MoleculeDictionary {
	if r == nil {
		return nil
	}
	return r.MolregnoMoleculeDictionary
}

// GetHracClass returns the cached related HracClassification; nil-receiver safe.
func (r *moleculeHracClassificationR) GetHracClass() *HracClassification {
	if r == nil {
		return nil
	}
	return r.HracClass
}

// moleculeHracClassificationL is where Load methods for each relationship are stored.
type moleculeHracClassificationL struct{}

var (
	moleculeHracClassificationAllColumns            = []string{"mol_hrac_id", "hrac_class_id", "molregno"}
	moleculeHracClassificationColumnsWithoutDefault = []string{"mol_hrac_id", "hrac_class_id", "molregno"}
	moleculeHracClassificationColumnsWithDefault    = []string{}
	moleculeHracClassificationPrimaryKeyColumns     = []string{"mol_hrac_id"}
	moleculeHracClassificationGeneratedColumns      = []string{}
)

type (
	// MoleculeHracClassificationSlice is an alias for a slice of pointers to MoleculeHracClassification.
	// This should almost always be used instead of []MoleculeHracClassification.
	MoleculeHracClassificationSlice []*MoleculeHracClassification
	// MoleculeHracClassificationHook is the signature for custom MoleculeHracClassification hook methods
	MoleculeHracClassificationHook func(context.Context, boil.ContextExecutor, *MoleculeHracClassification) error

	moleculeHracClassificationQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	moleculeHracClassificationType                 = reflect.TypeOf(&MoleculeHracClassification{})
	moleculeHracClassificationMapping              = queries.MakeStructMapping(moleculeHracClassificationType)
	moleculeHracClassificationPrimaryKeyMapping, _ = queries.BindMapping(moleculeHracClassificationType, moleculeHracClassificationMapping, moleculeHracClassificationPrimaryKeyColumns)
	moleculeHracClassificationInsertCacheMut       sync.RWMutex
	moleculeHracClassificationInsertCache          = make(map[string]insertCache)
	moleculeHracClassificationUpdateCacheMut       sync.RWMutex
	moleculeHracClassificationUpdateCache          = make(map[string]updateCache)
	moleculeHracClassificationUpsertCacheMut       sync.RWMutex
	moleculeHracClassificationUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Registered hook slices, appended to via AddMoleculeHracClassificationHook.
var moleculeHracClassificationAfterSelectHooks []MoleculeHracClassificationHook

var moleculeHracClassificationBeforeInsertHooks []MoleculeHracClassificationHook
var moleculeHracClassificationAfterInsertHooks []MoleculeHracClassificationHook

var moleculeHracClassificationBeforeUpdateHooks []MoleculeHracClassificationHook
var moleculeHracClassificationAfterUpdateHooks []MoleculeHracClassificationHook

var moleculeHracClassificationBeforeDeleteHooks []MoleculeHracClassificationHook
var moleculeHracClassificationAfterDeleteHooks []MoleculeHracClassificationHook

var moleculeHracClassificationBeforeUpsertHooks []MoleculeHracClassificationHook
var moleculeHracClassificationAfterUpsertHooks []MoleculeHracClassificationHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *MoleculeHracClassification) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHracClassificationAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *MoleculeHracClassification) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHracClassificationBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *MoleculeHracClassification) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHracClassificationAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *MoleculeHracClassification) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHracClassificationBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *MoleculeHracClassification) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHracClassificationAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *MoleculeHracClassification) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHracClassificationBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *MoleculeHracClassification) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHracClassificationAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *MoleculeHracClassification) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHracClassificationBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *MoleculeHracClassification) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeHracClassificationAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddMoleculeHracClassificationHook registers your hook function for all future operations.
// NOTE(review): the package-level hook slices are appended to without locking;
// register hooks during startup, before concurrent use — confirm against callers.
func AddMoleculeHracClassificationHook(hookPoint boil.HookPoint, moleculeHracClassificationHook MoleculeHracClassificationHook) {
	switch hookPoint {
	case boil.AfterSelectHook:
		moleculeHracClassificationAfterSelectHooks = append(moleculeHracClassificationAfterSelectHooks, moleculeHracClassificationHook)
	case boil.BeforeInsertHook:
		moleculeHracClassificationBeforeInsertHooks = append(moleculeHracClassificationBeforeInsertHooks, moleculeHracClassificationHook)
	case boil.AfterInsertHook:
		moleculeHracClassificationAfterInsertHooks = append(moleculeHracClassificationAfterInsertHooks, moleculeHracClassificationHook)
	case boil.BeforeUpdateHook:
		moleculeHracClassificationBeforeUpdateHooks = append(moleculeHracClassificationBeforeUpdateHooks, moleculeHracClassificationHook)
	case boil.AfterUpdateHook:
		moleculeHracClassificationAfterUpdateHooks = append(moleculeHracClassificationAfterUpdateHooks, moleculeHracClassificationHook)
	case boil.BeforeDeleteHook:
		moleculeHracClassificationBeforeDeleteHooks = append(moleculeHracClassificationBeforeDeleteHooks, moleculeHracClassificationHook)
	case boil.AfterDeleteHook:
		moleculeHracClassificationAfterDeleteHooks = append(moleculeHracClassificationAfterDeleteHooks, moleculeHracClassificationHook)
	case boil.BeforeUpsertHook:
		moleculeHracClassificationBeforeUpsertHooks = append(moleculeHracClassificationBeforeUpsertHooks, moleculeHracClassificationHook)
	case boil.AfterUpsertHook:
		moleculeHracClassificationAfterUpsertHooks = append(moleculeHracClassificationAfterUpsertHooks, moleculeHracClassificationHook)
	}
}

// One returns a single moleculeHracClassification record from the query.
func (q moleculeHracClassificationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*MoleculeHracClassification, error) {
	o := &MoleculeHracClassification{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			// Surface the sentinel directly so callers can errors.Is it.
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for molecule_hrac_classification")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all MoleculeHracClassification records from the query.
func (q moleculeHracClassificationQuery) All(ctx context.Context, exec boil.ContextExecutor) (MoleculeHracClassificationSlice, error) {
	var o []*MoleculeHracClassification

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to MoleculeHracClassification slice")
	}

	if len(moleculeHracClassificationAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all MoleculeHracClassification records in the query.
func (q moleculeHracClassificationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Replace any existing select list with COUNT(*).
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count molecule_hrac_classification rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q moleculeHracClassificationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	// LIMIT 1: the count can only be 0 or 1, enough to answer existence.
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if molecule_hrac_classification exists")
	}

	return count > 0, nil
}

// MolregnoMoleculeDictionary pointed to by the foreign key.
func (o *MoleculeHracClassification) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"molregno\" = ?", o.Molregno),
	}

	queryMods = append(queryMods, mods...)

	return MoleculeDictionaries(queryMods...)
}

// HracClass pointed to by the foreign key.
func (o *MoleculeHracClassification) HracClass(mods ...qm.QueryMod) hracClassificationQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"hrac_class_id\" = ?", o.HracClassID),
	}

	queryMods = append(queryMods, mods...)

	return HracClassifications(queryMods...)
}

// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (moleculeHracClassificationL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeHracClassification interface{}, mods queries.Applicator) error {
	var slice []*MoleculeHracClassification
	var object *MoleculeHracClassification

	// Caller passes either one object (singular) or a slice of objects.
	if singular {
		object = maybeMoleculeHracClassification.(*MoleculeHracClassification)
	} else {
		slice = *maybeMoleculeHracClassification.(*[]*MoleculeHracClassification)
	}

	// Collect distinct FK values to query with a single IN clause.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeHracClassificationR{}
		}
		args = append(args, object.Molregno)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeHracClassificationR{}
			}

			// Skip FK values already queued (dedup by linear scan).
			for _, a := range args {
				if a == obj.Molregno {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_dictionary`),
		qm.WhereIn(`molecule_dictionary.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load MoleculeDictionary")
	}

	var resultSlice []*MoleculeDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary")
	}

	if len(moleculeHracClassificationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Wire both directions of the relationship into the R structs.
	if singular {
		foreign := resultSlice[0]
		object.R.MolregnoMoleculeDictionary = foreign
		if foreign.R == nil {
			foreign.R = &moleculeDictionaryR{}
		}
		foreign.R.MolregnoMoleculeHracClassifications = append(foreign.R.MolregnoMoleculeHracClassifications, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.Molregno == foreign.Molregno {
				local.R.MolregnoMoleculeDictionary = foreign
				if foreign.R == nil {
					foreign.R = &moleculeDictionaryR{}
				}
				foreign.R.MolregnoMoleculeHracClassifications = append(foreign.R.MolregnoMoleculeHracClassifications, local)
				break
			}
		}
	}

	return nil
}

// LoadHracClass allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (moleculeHracClassificationL) LoadHracClass(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeHracClassification interface{}, mods queries.Applicator) error {
	var slice []*MoleculeHracClassification
	var object *MoleculeHracClassification

	if singular {
		object = maybeMoleculeHracClassification.(*MoleculeHracClassification)
	} else {
		slice = *maybeMoleculeHracClassification.(*[]*MoleculeHracClassification)
	}

	// Collect distinct hrac_class_id values for a single IN query.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeHracClassificationR{}
		}
		args = append(args, object.HracClassID)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeHracClassificationR{}
			}

			for _, a := range args {
				if a == obj.HracClassID {
					continue Outer
				}
			}

			args = append(args, obj.HracClassID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`hrac_classification`),
		qm.WhereIn(`hrac_classification.hrac_class_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load HracClassification")
	}

	var resultSlice []*HracClassification
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice HracClassification")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for hrac_classification")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for hrac_classification")
	}

	if len(moleculeHracClassificationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Wire both directions of the relationship into the R structs.
	if singular {
		foreign := resultSlice[0]
		object.R.HracClass = foreign
		if foreign.R == nil {
			foreign.R = &hracClassificationR{}
		}
		foreign.R.HracClassMoleculeHracClassifications = append(foreign.R.HracClassMoleculeHracClassifications, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.HracClassID == foreign.HracClassID {
				local.R.HracClass = foreign
				if foreign.R == nil {
					foreign.R = &hracClassificationR{}
				}
				foreign.R.HracClassMoleculeHracClassifications = append(foreign.R.HracClassMoleculeHracClassifications, local)
				break
			}
		}
	}

	return nil
}

// SetMolregnoMoleculeDictionary of the moleculeHracClassification to the related item.
// Sets o.R.MolregnoMoleculeDictionary to related.
// Adds o to related.R.MolregnoMoleculeHracClassifications.
+func (o *MoleculeHracClassification) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_hrac_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, moleculeHracClassificationPrimaryKeyColumns), + ) + values := []interface{}{related.Molregno, o.MolHracID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.Molregno = related.Molregno + if o.R == nil { + o.R = &moleculeHracClassificationR{ + MolregnoMoleculeDictionary: related, + } + } else { + o.R.MolregnoMoleculeDictionary = related + } + + if related.R == nil { + related.R = &moleculeDictionaryR{ + MolregnoMoleculeHracClassifications: MoleculeHracClassificationSlice{o}, + } + } else { + related.R.MolregnoMoleculeHracClassifications = append(related.R.MolregnoMoleculeHracClassifications, o) + } + + return nil +} + +// SetHracClass of the moleculeHracClassification to the related item. +// Sets o.R.HracClass to related. +// Adds o to related.R.HracClassMoleculeHracClassifications. 
+func (o *MoleculeHracClassification) SetHracClass(ctx context.Context, exec boil.ContextExecutor, insert bool, related *HracClassification) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_hrac_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"hrac_class_id"}), + strmangle.WhereClause("\"", "\"", 0, moleculeHracClassificationPrimaryKeyColumns), + ) + values := []interface{}{related.HracClassID, o.MolHracID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.HracClassID = related.HracClassID + if o.R == nil { + o.R = &moleculeHracClassificationR{ + HracClass: related, + } + } else { + o.R.HracClass = related + } + + if related.R == nil { + related.R = &hracClassificationR{ + HracClassMoleculeHracClassifications: MoleculeHracClassificationSlice{o}, + } + } else { + related.R.HracClassMoleculeHracClassifications = append(related.R.HracClassMoleculeHracClassifications, o) + } + + return nil +} + +// MoleculeHracClassifications retrieves all the records using an executor. +func MoleculeHracClassifications(mods ...qm.QueryMod) moleculeHracClassificationQuery { + mods = append(mods, qm.From("\"molecule_hrac_classification\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"molecule_hrac_classification\".*"}) + } + + return moleculeHracClassificationQuery{q} +} + +// FindMoleculeHracClassification retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindMoleculeHracClassification(ctx context.Context, exec boil.ContextExecutor, molHracID int64, selectCols ...string) (*MoleculeHracClassification, error) { + moleculeHracClassificationObj := &MoleculeHracClassification{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"molecule_hrac_classification\" where \"mol_hrac_id\"=?", sel, + ) + + q := queries.Raw(query, molHracID) + + err := q.Bind(ctx, exec, moleculeHracClassificationObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from molecule_hrac_classification") + } + + if err = moleculeHracClassificationObj.doAfterSelectHooks(ctx, exec); err != nil { + return moleculeHracClassificationObj, err + } + + return moleculeHracClassificationObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *MoleculeHracClassification) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no molecule_hrac_classification provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(moleculeHracClassificationColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + moleculeHracClassificationInsertCacheMut.RLock() + cache, cached := moleculeHracClassificationInsertCache[key] + moleculeHracClassificationInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + moleculeHracClassificationAllColumns, + moleculeHracClassificationColumnsWithDefault, + moleculeHracClassificationColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(moleculeHracClassificationType, moleculeHracClassificationMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(moleculeHracClassificationType, moleculeHracClassificationMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"molecule_hrac_classification\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"molecule_hrac_classification\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if 
len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into molecule_hrac_classification") + } + + if !cached { + moleculeHracClassificationInsertCacheMut.Lock() + moleculeHracClassificationInsertCache[key] = cache + moleculeHracClassificationInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the MoleculeHracClassification. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *MoleculeHracClassification) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + moleculeHracClassificationUpdateCacheMut.RLock() + cache, cached := moleculeHracClassificationUpdateCache[key] + moleculeHracClassificationUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + moleculeHracClassificationAllColumns, + moleculeHracClassificationPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update molecule_hrac_classification, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"molecule_hrac_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, moleculeHracClassificationPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(moleculeHracClassificationType, moleculeHracClassificationMapping, append(wl, 
moleculeHracClassificationPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update molecule_hrac_classification row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for molecule_hrac_classification") + } + + if !cached { + moleculeHracClassificationUpdateCacheMut.Lock() + moleculeHracClassificationUpdateCache[key] = cache + moleculeHracClassificationUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q moleculeHracClassificationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for molecule_hrac_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for molecule_hrac_classification") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. 
+func (o MoleculeHracClassificationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeHracClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"molecule_hrac_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeHracClassificationPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in moleculeHracClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all moleculeHracClassification") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. 
+func (o *MoleculeHracClassification) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no molecule_hrac_classification provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(moleculeHracClassificationColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + moleculeHracClassificationUpsertCacheMut.RLock() + cache, cached := moleculeHracClassificationUpsertCache[key] + moleculeHracClassificationUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + moleculeHracClassificationAllColumns, + moleculeHracClassificationColumnsWithDefault, + moleculeHracClassificationColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + moleculeHracClassificationAllColumns, + moleculeHracClassificationPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert molecule_hrac_classification, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(moleculeHracClassificationPrimaryKeyColumns)) + copy(conflict, 
moleculeHracClassificationPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"molecule_hrac_classification\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(moleculeHracClassificationType, moleculeHracClassificationMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(moleculeHracClassificationType, moleculeHracClassificationMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert molecule_hrac_classification") + } + + if !cached { + moleculeHracClassificationUpsertCacheMut.Lock() + moleculeHracClassificationUpsertCache[key] = cache + moleculeHracClassificationUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single MoleculeHracClassification record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *MoleculeHracClassification) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no MoleculeHracClassification provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), moleculeHracClassificationPrimaryKeyMapping) + sql := "DELETE FROM \"molecule_hrac_classification\" WHERE \"mol_hrac_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from molecule_hrac_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for molecule_hrac_classification") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q moleculeHracClassificationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no moleculeHracClassificationQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from molecule_hrac_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for molecule_hrac_classification") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o MoleculeHracClassificationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(moleculeHracClassificationBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeHracClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"molecule_hrac_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeHracClassificationPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from moleculeHracClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for molecule_hrac_classification") + } + + if len(moleculeHracClassificationAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *MoleculeHracClassification) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindMoleculeHracClassification(ctx, exec, o.MolHracID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *MoleculeHracClassificationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := MoleculeHracClassificationSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeHracClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"molecule_hrac_classification\".* FROM \"molecule_hrac_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeHracClassificationPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in MoleculeHracClassificationSlice") + } + + *o = slice + + return nil +} + +// MoleculeHracClassificationExists checks if the MoleculeHracClassification row exists. +func MoleculeHracClassificationExists(ctx context.Context, exec boil.ContextExecutor, molHracID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"molecule_hrac_classification\" where \"mol_hrac_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, molHracID) + } + row := exec.QueryRowContext(ctx, sql, molHracID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if molecule_hrac_classification exists") + } + + return exists, nil +} diff --git a/models/molecule_irac_classification.go b/models/molecule_irac_classification.go new file mode 100644 index 0000000..01ddb65 --- /dev/null +++ b/models/molecule_irac_classification.go @@ -0,0 +1,1241 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// MoleculeIracClassification is an object representing the database table. +type MoleculeIracClassification struct { + MolIracID int64 `boil:"mol_irac_id" json:"mol_irac_id" toml:"mol_irac_id" yaml:"mol_irac_id"` + IracClassID int64 `boil:"irac_class_id" json:"irac_class_id" toml:"irac_class_id" yaml:"irac_class_id"` + Molregno int64 `boil:"molregno" json:"molregno" toml:"molregno" yaml:"molregno"` + + R *moleculeIracClassificationR `boil:"-" json:"-" toml:"-" yaml:"-"` + L moleculeIracClassificationL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var MoleculeIracClassificationColumns = struct { + MolIracID string + IracClassID string + Molregno string +}{ + MolIracID: "mol_irac_id", + IracClassID: "irac_class_id", + Molregno: "molregno", +} + +var MoleculeIracClassificationTableColumns = struct { + MolIracID string + IracClassID string + Molregno string +}{ + MolIracID: "molecule_irac_classification.mol_irac_id", + IracClassID: "molecule_irac_classification.irac_class_id", + Molregno: "molecule_irac_classification.molregno", +} + +// Generated where + +var MoleculeIracClassificationWhere = struct { + MolIracID whereHelperint64 + IracClassID whereHelperint64 + Molregno whereHelperint64 +}{ + MolIracID: whereHelperint64{field: "\"molecule_irac_classification\".\"mol_irac_id\""}, + IracClassID: whereHelperint64{field: "\"molecule_irac_classification\".\"irac_class_id\""}, + Molregno: whereHelperint64{field: "\"molecule_irac_classification\".\"molregno\""}, +} + +// MoleculeIracClassificationRels is where relationship names are stored. 
+var MoleculeIracClassificationRels = struct { + MolregnoMoleculeDictionary string + IracClass string +}{ + MolregnoMoleculeDictionary: "MolregnoMoleculeDictionary", + IracClass: "IracClass", +} + +// moleculeIracClassificationR is where relationships are stored. +type moleculeIracClassificationR struct { + MolregnoMoleculeDictionary *MoleculeDictionary `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"` + IracClass *IracClassification `boil:"IracClass" json:"IracClass" toml:"IracClass" yaml:"IracClass"` +} + +// NewStruct creates a new relationship struct +func (*moleculeIracClassificationR) NewStruct() *moleculeIracClassificationR { + return &moleculeIracClassificationR{} +} + +func (r *moleculeIracClassificationR) GetMolregnoMoleculeDictionary() *MoleculeDictionary { + if r == nil { + return nil + } + return r.MolregnoMoleculeDictionary +} + +func (r *moleculeIracClassificationR) GetIracClass() *IracClassification { + if r == nil { + return nil + } + return r.IracClass +} + +// moleculeIracClassificationL is where Load methods for each relationship are stored. +type moleculeIracClassificationL struct{} + +var ( + moleculeIracClassificationAllColumns = []string{"mol_irac_id", "irac_class_id", "molregno"} + moleculeIracClassificationColumnsWithoutDefault = []string{"mol_irac_id", "irac_class_id", "molregno"} + moleculeIracClassificationColumnsWithDefault = []string{} + moleculeIracClassificationPrimaryKeyColumns = []string{"mol_irac_id"} + moleculeIracClassificationGeneratedColumns = []string{} +) + +type ( + // MoleculeIracClassificationSlice is an alias for a slice of pointers to MoleculeIracClassification. + // This should almost always be used instead of []MoleculeIracClassification. 
+ MoleculeIracClassificationSlice []*MoleculeIracClassification + // MoleculeIracClassificationHook is the signature for custom MoleculeIracClassification hook methods + MoleculeIracClassificationHook func(context.Context, boil.ContextExecutor, *MoleculeIracClassification) error + + moleculeIracClassificationQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + moleculeIracClassificationType = reflect.TypeOf(&MoleculeIracClassification{}) + moleculeIracClassificationMapping = queries.MakeStructMapping(moleculeIracClassificationType) + moleculeIracClassificationPrimaryKeyMapping, _ = queries.BindMapping(moleculeIracClassificationType, moleculeIracClassificationMapping, moleculeIracClassificationPrimaryKeyColumns) + moleculeIracClassificationInsertCacheMut sync.RWMutex + moleculeIracClassificationInsertCache = make(map[string]insertCache) + moleculeIracClassificationUpdateCacheMut sync.RWMutex + moleculeIracClassificationUpdateCache = make(map[string]updateCache) + moleculeIracClassificationUpsertCacheMut sync.RWMutex + moleculeIracClassificationUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var moleculeIracClassificationAfterSelectHooks []MoleculeIracClassificationHook + +var moleculeIracClassificationBeforeInsertHooks []MoleculeIracClassificationHook +var moleculeIracClassificationAfterInsertHooks []MoleculeIracClassificationHook + +var moleculeIracClassificationBeforeUpdateHooks []MoleculeIracClassificationHook +var moleculeIracClassificationAfterUpdateHooks []MoleculeIracClassificationHook + +var moleculeIracClassificationBeforeDeleteHooks []MoleculeIracClassificationHook +var moleculeIracClassificationAfterDeleteHooks []MoleculeIracClassificationHook + +var moleculeIracClassificationBeforeUpsertHooks []MoleculeIracClassificationHook +var moleculeIracClassificationAfterUpsertHooks []MoleculeIracClassificationHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *MoleculeIracClassification) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeIracClassificationAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *MoleculeIracClassification) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeIracClassificationBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. 
// doAfterInsertHooks executes all "after Insert" hooks.
// Hooks run in registration order; the first error aborts the chain.
func (o *MoleculeIracClassification) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeIracClassificationAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *MoleculeIracClassification) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeIracClassificationBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *MoleculeIracClassification) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeIracClassificationAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *MoleculeIracClassification) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeIracClassificationBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *MoleculeIracClassification) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeIracClassificationAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *MoleculeIracClassification) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeIracClassificationBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *MoleculeIracClassification) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range moleculeIracClassificationAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddMoleculeIracClassificationHook registers your hook function for all future operations.
// Registration appends to unsynchronized package-level slices, so hooks are
// intended to be added during package initialization, before concurrent use.
func AddMoleculeIracClassificationHook(hookPoint boil.HookPoint, moleculeIracClassificationHook MoleculeIracClassificationHook) {
	switch hookPoint {
	case boil.AfterSelectHook:
		moleculeIracClassificationAfterSelectHooks = append(moleculeIracClassificationAfterSelectHooks, moleculeIracClassificationHook)
	case boil.BeforeInsertHook:
		moleculeIracClassificationBeforeInsertHooks = append(moleculeIracClassificationBeforeInsertHooks, moleculeIracClassificationHook)
	case boil.AfterInsertHook:
		moleculeIracClassificationAfterInsertHooks = append(moleculeIracClassificationAfterInsertHooks, moleculeIracClassificationHook)
	case boil.BeforeUpdateHook:
		moleculeIracClassificationBeforeUpdateHooks = append(moleculeIracClassificationBeforeUpdateHooks, moleculeIracClassificationHook)
	case boil.AfterUpdateHook:
		moleculeIracClassificationAfterUpdateHooks = append(moleculeIracClassificationAfterUpdateHooks, moleculeIracClassificationHook)
	case boil.BeforeDeleteHook:
		moleculeIracClassificationBeforeDeleteHooks = append(moleculeIracClassificationBeforeDeleteHooks, moleculeIracClassificationHook)
	case boil.AfterDeleteHook:
		moleculeIracClassificationAfterDeleteHooks = append(moleculeIracClassificationAfterDeleteHooks, moleculeIracClassificationHook)
	case boil.BeforeUpsertHook:
		moleculeIracClassificationBeforeUpsertHooks = append(moleculeIracClassificationBeforeUpsertHooks, moleculeIracClassificationHook)
	case boil.AfterUpsertHook:
		moleculeIracClassificationAfterUpsertHooks = append(moleculeIracClassificationAfterUpsertHooks, moleculeIracClassificationHook)
	}
}

// One returns a single moleculeIracClassification record from the query.
// It forces LIMIT 1 on the query and passes sql.ErrNoRows through unwrapped
// so callers can test for it with errors.Is.
func (q moleculeIracClassificationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*MoleculeIracClassification, error) {
	o := &MoleculeIracClassification{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for molecule_irac_classification")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all MoleculeIracClassification records from the query.
// After-select hooks are only iterated when at least one is registered.
func (q moleculeIracClassificationQuery) All(ctx context.Context, exec boil.ContextExecutor) (MoleculeIracClassificationSlice, error) {
	var o []*MoleculeIracClassification

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to MoleculeIracClassification slice")
	}

	if len(moleculeIracClassificationAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all MoleculeIracClassification records in the query.
+func (q moleculeIracClassificationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count molecule_irac_classification rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q moleculeIracClassificationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if molecule_irac_classification exists") + } + + return count > 0, nil +} + +// MolregnoMoleculeDictionary pointed to by the foreign key. +func (o *MoleculeIracClassification) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"molregno\" = ?", o.Molregno), + } + + queryMods = append(queryMods, mods...) + + return MoleculeDictionaries(queryMods...) +} + +// IracClass pointed to by the foreign key. +func (o *MoleculeIracClassification) IracClass(mods ...qm.QueryMod) iracClassificationQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"irac_class_id\" = ?", o.IracClassID), + } + + queryMods = append(queryMods, mods...) + + return IracClassifications(queryMods...) +} + +// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
// Foreign-key values are deduplicated before querying, and both sides of the
// relationship (o.R and foreign.R) are wired up on success.
func (moleculeIracClassificationL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeIracClassification interface{}, mods queries.Applicator) error {
	var slice []*MoleculeIracClassification
	var object *MoleculeIracClassification

	if singular {
		object = maybeMoleculeIracClassification.(*MoleculeIracClassification)
	} else {
		slice = *maybeMoleculeIracClassification.(*[]*MoleculeIracClassification)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeIracClassificationR{}
		}
		args = append(args, object.Molregno)

	} else {
		// Collect each distinct molregno once; duplicates skip via the label.
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeIracClassificationR{}
			}

			for _, a := range args {
				if a == obj.Molregno {
					continue Outer
				}
			}

			args = append(args, obj.Molregno)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`molecule_dictionary`),
		qm.WhereIn(`molecule_dictionary.molregno in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load MoleculeDictionary")
	}

	var resultSlice []*MoleculeDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary")
	}

	if len(moleculeIracClassificationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.MolregnoMoleculeDictionary = foreign
		if foreign.R == nil {
			foreign.R = &moleculeDictionaryR{}
		}
		foreign.R.MolregnoMoleculeIracClassifications = append(foreign.R.MolregnoMoleculeIracClassifications, object)
		return nil
	}

	// Match each local row to its foreign row and set back-references.
	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.Molregno == foreign.Molregno {
				local.R.MolregnoMoleculeDictionary = foreign
				if foreign.R == nil {
					foreign.R = &moleculeDictionaryR{}
				}
				foreign.R.MolregnoMoleculeIracClassifications = append(foreign.R.MolregnoMoleculeIracClassifications, local)
				break
			}
		}
	}

	return nil
}

// LoadIracClass allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
// Mirrors LoadMolregnoMoleculeDictionary, keyed on irac_class_id.
func (moleculeIracClassificationL) LoadIracClass(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeIracClassification interface{}, mods queries.Applicator) error {
	var slice []*MoleculeIracClassification
	var object *MoleculeIracClassification

	if singular {
		object = maybeMoleculeIracClassification.(*MoleculeIracClassification)
	} else {
		slice = *maybeMoleculeIracClassification.(*[]*MoleculeIracClassification)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &moleculeIracClassificationR{}
		}
		args = append(args, object.IracClassID)

	} else {
		// Collect each distinct irac_class_id once; duplicates skip via the label.
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &moleculeIracClassificationR{}
			}

			for _, a := range args {
				if a == obj.IracClassID {
					continue Outer
				}
			}

			args = append(args, obj.IracClassID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`irac_classification`),
		qm.WhereIn(`irac_classification.irac_class_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load IracClassification")
	}

	var resultSlice []*IracClassification
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice IracClassification")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for irac_classification")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for irac_classification")
	}

	if len(moleculeIracClassificationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.IracClass = foreign
		if foreign.R == nil {
			foreign.R = &iracClassificationR{}
		}
		foreign.R.IracClassMoleculeIracClassifications = append(foreign.R.IracClassMoleculeIracClassifications, object)
		return nil
	}

	// Match each local row to its foreign row and set back-references.
	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.IracClassID == foreign.IracClassID {
				local.R.IracClass = foreign
				if foreign.R == nil {
					foreign.R = &iracClassificationR{}
				}
				foreign.R.IracClassMoleculeIracClassifications = append(foreign.R.IracClassMoleculeIracClassifications, local)
				break
			}
		}
	}

	return nil
}

// SetMolregnoMoleculeDictionary of the moleculeIracClassification to the related item.
// Sets o.R.MolregnoMoleculeDictionary to related.
// Adds o to related.R.MolregnoMoleculeIracClassifications.
// SetMolregnoMoleculeDictionary of the moleculeIracClassification to the related item.
// Sets o.R.MolregnoMoleculeDictionary to related.
// Adds o to related.R.MolregnoMoleculeIracClassifications.
// If insert is true, related is inserted first; the local row's foreign key is
// then updated in the database before the in-memory R structs are wired.
func (o *MoleculeIracClassification) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"molecule_irac_classification\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}),
		strmangle.WhereClause("\"", "\"", 0, moleculeIracClassificationPrimaryKeyColumns),
	)
	values := []interface{}{related.Molregno, o.MolIracID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	o.Molregno = related.Molregno
	if o.R == nil {
		o.R = &moleculeIracClassificationR{
			MolregnoMoleculeDictionary: related,
		}
	} else {
		o.R.MolregnoMoleculeDictionary = related
	}

	if related.R == nil {
		related.R = &moleculeDictionaryR{
			MolregnoMoleculeIracClassifications: MoleculeIracClassificationSlice{o},
		}
	} else {
		related.R.MolregnoMoleculeIracClassifications = append(related.R.MolregnoMoleculeIracClassifications, o)
	}

	return nil
}

// SetIracClass of the moleculeIracClassification to the related item.
// Sets o.R.IracClass to related.
// Adds o to related.R.IracClassMoleculeIracClassifications.
// Same pattern as SetMolregnoMoleculeDictionary, keyed on irac_class_id.
func (o *MoleculeIracClassification) SetIracClass(ctx context.Context, exec boil.ContextExecutor, insert bool, related *IracClassification) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"molecule_irac_classification\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"irac_class_id"}),
		strmangle.WhereClause("\"", "\"", 0, moleculeIracClassificationPrimaryKeyColumns),
	)
	values := []interface{}{related.IracClassID, o.MolIracID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	o.IracClassID = related.IracClassID
	if o.R == nil {
		o.R = &moleculeIracClassificationR{
			IracClass: related,
		}
	} else {
		o.R.IracClass = related
	}

	if related.R == nil {
		related.R = &iracClassificationR{
			IracClassMoleculeIracClassifications: MoleculeIracClassificationSlice{o},
		}
	} else {
		related.R.IracClassMoleculeIracClassifications = append(related.R.IracClassMoleculeIracClassifications, o)
	}

	return nil
}

// MoleculeIracClassifications retrieves all the records using an executor.
// A default "table.*" select list is applied only when none was supplied.
func MoleculeIracClassifications(mods ...qm.QueryMod) moleculeIracClassificationQuery {
	mods = append(mods, qm.From("\"molecule_irac_classification\""))
	q := NewQuery(mods...)
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"molecule_irac_classification\".*"})
	}

	return moleculeIracClassificationQuery{q}
}

// FindMoleculeIracClassification retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
// sql.ErrNoRows is passed through unwrapped for callers using errors.Is.
func FindMoleculeIracClassification(ctx context.Context, exec boil.ContextExecutor, molIracID int64, selectCols ...string) (*MoleculeIracClassification, error) {
	moleculeIracClassificationObj := &MoleculeIracClassification{}

	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"molecule_irac_classification\" where \"mol_irac_id\"=?", sel,
	)

	q := queries.Raw(query, molIracID)

	err := q.Bind(ctx, exec, moleculeIracClassificationObj)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from molecule_irac_classification")
	}

	if err = moleculeIracClassificationObj.doAfterSelectHooks(ctx, exec); err != nil {
		return moleculeIracClassificationObj, err
	}

	return moleculeIracClassificationObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
// The generated statement is cached per (columns, non-zero-defaults) key under
// a RWMutex; columns with database defaults are fetched back via RETURNING.
func (o *MoleculeIracClassification) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no molecule_irac_classification provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(moleculeIracClassificationColumnsWithDefault, o)

	key := makeCacheKey(columns, nzDefaults)
	moleculeIracClassificationInsertCacheMut.RLock()
	cache, cached := moleculeIracClassificationInsertCache[key]
	moleculeIracClassificationInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			moleculeIracClassificationAllColumns,
			moleculeIracClassificationColumnsWithDefault,
			moleculeIracClassificationColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(moleculeIracClassificationType, moleculeIracClassificationMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(moleculeIracClassificationType, moleculeIracClassificationMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"molecule_irac_classification\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"molecule_irac_classification\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	// When there are RETURNING columns, scan them back into the struct.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into molecule_irac_classification")
	}

	if !cached {
		moleculeIracClassificationInsertCacheMut.Lock()
		moleculeIracClassificationInsertCache[key] = cache
		moleculeIracClassificationInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the MoleculeIracClassification.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *MoleculeIracClassification) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	moleculeIracClassificationUpdateCacheMut.RLock()
	cache, cached := moleculeIracClassificationUpdateCache[key]
	moleculeIracClassificationUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			moleculeIracClassificationAllColumns,
			moleculeIracClassificationPrimaryKeyColumns,
		)

		// created_at is excluded from inferred updates unless explicitly whitelisted.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update molecule_irac_classification, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"molecule_irac_classification\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, moleculeIracClassificationPrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(moleculeIracClassificationType, moleculeIracClassificationMapping, append(wl, moleculeIracClassificationPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update molecule_irac_classification row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for molecule_irac_classification")
	}

	if !cached {
		moleculeIracClassificationUpdateCacheMut.Lock()
		moleculeIracClassificationUpdateCache[key] = cache
		moleculeIracClassificationUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
// Note: no per-row update hooks run for bulk updates.
func (q moleculeIracClassificationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for molecule_irac_classification")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for molecule_irac_classification")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
// UpdateAll updates all rows with the specified column values, using an executor.
// Rows are matched by repeating the primary-key WHERE clause once per slice element.
func (o MoleculeIracClassificationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeIracClassificationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"molecule_irac_classification\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeIracClassificationPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in moleculeIracClassification slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all moleculeIracClassification")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
// The SQLite upsert statement is cached under a key built from the conflict
// target, both column sets, and the non-zero default columns.
func (o *MoleculeIracClassification) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no molecule_irac_classification provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(moleculeIracClassificationColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	moleculeIracClassificationUpsertCacheMut.RLock()
	cache, cached := moleculeIracClassificationUpsertCache[key]
	moleculeIracClassificationUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			moleculeIracClassificationAllColumns,
			moleculeIracClassificationColumnsWithDefault,
			moleculeIracClassificationColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			moleculeIracClassificationAllColumns,
			moleculeIracClassificationPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert molecule_irac_classification, could not build update column list")
		}

		// Default the conflict target to the primary key when none is supplied.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(moleculeIracClassificationPrimaryKeyColumns))
			copy(conflict, moleculeIracClassificationPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"molecule_irac_classification\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(moleculeIracClassificationType, moleculeIracClassificationMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(moleculeIracClassificationType, moleculeIracClassificationMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert molecule_irac_classification")
	}

	if !cached {
		moleculeIracClassificationUpsertCacheMut.Lock()
		moleculeIracClassificationUpsertCache[key] = cache
		moleculeIracClassificationUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single MoleculeIracClassification record with an executor.
// Delete will match against the primary key column to find the record to delete.
// Delete deletes a single MoleculeIracClassification record with an executor.
// Delete will match against the primary key column to find the record to delete.
// Before/after delete hooks run around the statement.
func (o *MoleculeIracClassification) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no MoleculeIracClassification provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), moleculeIracClassificationPrimaryKeyMapping)
	sql := "DELETE FROM \"molecule_irac_classification\" WHERE \"mol_irac_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from molecule_irac_classification")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for molecule_irac_classification")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
// Note: no per-row delete hooks run for query-level bulk deletes.
func (q moleculeIracClassificationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no moleculeIracClassificationQuery provided for delete all")
	}

	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from molecule_irac_classification")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for molecule_irac_classification")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
// Unlike the query-level DeleteAll, per-row before/after delete hooks DO run here.
func (o MoleculeIracClassificationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	if len(moleculeIracClassificationBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeIracClassificationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"molecule_irac_classification\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeIracClassificationPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from moleculeIracClassification slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for molecule_irac_classification")
	}

	if len(moleculeIracClassificationAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *MoleculeIracClassification) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindMoleculeIracClassification(ctx, exec, o.MolIracID)
	if err != nil {
		return err
	}

	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
+func (o *MoleculeIracClassificationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := MoleculeIracClassificationSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeIracClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"molecule_irac_classification\".* FROM \"molecule_irac_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeIracClassificationPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in MoleculeIracClassificationSlice") + } + + *o = slice + + return nil +} + +// MoleculeIracClassificationExists checks if the MoleculeIracClassification row exists. +func MoleculeIracClassificationExists(ctx context.Context, exec boil.ContextExecutor, molIracID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"molecule_irac_classification\" where \"mol_irac_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, molIracID) + } + row := exec.QueryRowContext(ctx, sql, molIracID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if molecule_irac_classification exists") + } + + return exists, nil +} diff --git a/models/molecule_synonyms.go b/models/molecule_synonyms.go new file mode 100644 index 0000000..ef3d405 --- /dev/null +++ b/models/molecule_synonyms.go @@ -0,0 +1,1293 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// MoleculeSynonym is an object representing the database table. +type MoleculeSynonym struct { + Molregno int64 `boil:"molregno" json:"molregno" toml:"molregno" yaml:"molregno"` + SynType string `boil:"syn_type" json:"syn_type" toml:"syn_type" yaml:"syn_type"` + MolsynID int64 `boil:"molsyn_id" json:"molsyn_id" toml:"molsyn_id" yaml:"molsyn_id"` + ResStemID null.Int64 `boil:"res_stem_id" json:"res_stem_id,omitempty" toml:"res_stem_id" yaml:"res_stem_id,omitempty"` + Synonyms null.String `boil:"synonyms" json:"synonyms,omitempty" toml:"synonyms" yaml:"synonyms,omitempty"` + + R *moleculeSynonymR `boil:"-" json:"-" toml:"-" yaml:"-"` + L moleculeSynonymL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var MoleculeSynonymColumns = struct { + Molregno string + SynType string + MolsynID string + ResStemID string + Synonyms string +}{ + Molregno: "molregno", + SynType: "syn_type", + MolsynID: "molsyn_id", + ResStemID: "res_stem_id", + Synonyms: "synonyms", +} + +var MoleculeSynonymTableColumns = struct { + Molregno string + SynType string + MolsynID string + ResStemID string + Synonyms string +}{ + Molregno: "molecule_synonyms.molregno", + SynType: "molecule_synonyms.syn_type", + MolsynID: "molecule_synonyms.molsyn_id", + ResStemID: "molecule_synonyms.res_stem_id", + Synonyms: "molecule_synonyms.synonyms", +} + +// Generated where + +var MoleculeSynonymWhere = struct { + Molregno whereHelperint64 + SynType whereHelperstring + MolsynID whereHelperint64 + ResStemID whereHelpernull_Int64 + Synonyms whereHelpernull_String +}{ + Molregno: 
whereHelperint64{field: "\"molecule_synonyms\".\"molregno\""}, + SynType: whereHelperstring{field: "\"molecule_synonyms\".\"syn_type\""}, + MolsynID: whereHelperint64{field: "\"molecule_synonyms\".\"molsyn_id\""}, + ResStemID: whereHelpernull_Int64{field: "\"molecule_synonyms\".\"res_stem_id\""}, + Synonyms: whereHelpernull_String{field: "\"molecule_synonyms\".\"synonyms\""}, +} + +// MoleculeSynonymRels is where relationship names are stored. +var MoleculeSynonymRels = struct { + ResStem string + MolregnoMoleculeDictionary string +}{ + ResStem: "ResStem", + MolregnoMoleculeDictionary: "MolregnoMoleculeDictionary", +} + +// moleculeSynonymR is where relationships are stored. +type moleculeSynonymR struct { + ResStem *ResearchStem `boil:"ResStem" json:"ResStem" toml:"ResStem" yaml:"ResStem"` + MolregnoMoleculeDictionary *MoleculeDictionary `boil:"MolregnoMoleculeDictionary" json:"MolregnoMoleculeDictionary" toml:"MolregnoMoleculeDictionary" yaml:"MolregnoMoleculeDictionary"` +} + +// NewStruct creates a new relationship struct +func (*moleculeSynonymR) NewStruct() *moleculeSynonymR { + return &moleculeSynonymR{} +} + +func (r *moleculeSynonymR) GetResStem() *ResearchStem { + if r == nil { + return nil + } + return r.ResStem +} + +func (r *moleculeSynonymR) GetMolregnoMoleculeDictionary() *MoleculeDictionary { + if r == nil { + return nil + } + return r.MolregnoMoleculeDictionary +} + +// moleculeSynonymL is where Load methods for each relationship are stored. +type moleculeSynonymL struct{} + +var ( + moleculeSynonymAllColumns = []string{"molregno", "syn_type", "molsyn_id", "res_stem_id", "synonyms"} + moleculeSynonymColumnsWithoutDefault = []string{"molregno", "syn_type", "molsyn_id"} + moleculeSynonymColumnsWithDefault = []string{"res_stem_id", "synonyms"} + moleculeSynonymPrimaryKeyColumns = []string{"molsyn_id"} + moleculeSynonymGeneratedColumns = []string{} +) + +type ( + // MoleculeSynonymSlice is an alias for a slice of pointers to MoleculeSynonym. 
+ // This should almost always be used instead of []MoleculeSynonym. + MoleculeSynonymSlice []*MoleculeSynonym + // MoleculeSynonymHook is the signature for custom MoleculeSynonym hook methods + MoleculeSynonymHook func(context.Context, boil.ContextExecutor, *MoleculeSynonym) error + + moleculeSynonymQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + moleculeSynonymType = reflect.TypeOf(&MoleculeSynonym{}) + moleculeSynonymMapping = queries.MakeStructMapping(moleculeSynonymType) + moleculeSynonymPrimaryKeyMapping, _ = queries.BindMapping(moleculeSynonymType, moleculeSynonymMapping, moleculeSynonymPrimaryKeyColumns) + moleculeSynonymInsertCacheMut sync.RWMutex + moleculeSynonymInsertCache = make(map[string]insertCache) + moleculeSynonymUpdateCacheMut sync.RWMutex + moleculeSynonymUpdateCache = make(map[string]updateCache) + moleculeSynonymUpsertCacheMut sync.RWMutex + moleculeSynonymUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var moleculeSynonymAfterSelectHooks []MoleculeSynonymHook + +var moleculeSynonymBeforeInsertHooks []MoleculeSynonymHook +var moleculeSynonymAfterInsertHooks []MoleculeSynonymHook + +var moleculeSynonymBeforeUpdateHooks []MoleculeSynonymHook +var moleculeSynonymAfterUpdateHooks []MoleculeSynonymHook + +var moleculeSynonymBeforeDeleteHooks []MoleculeSynonymHook +var moleculeSynonymAfterDeleteHooks []MoleculeSynonymHook + +var moleculeSynonymBeforeUpsertHooks []MoleculeSynonymHook +var moleculeSynonymAfterUpsertHooks []MoleculeSynonymHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *MoleculeSynonym) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeSynonymAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *MoleculeSynonym) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeSynonymBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *MoleculeSynonym) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeSynonymAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *MoleculeSynonym) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeSynonymBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *MoleculeSynonym) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeSynonymAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *MoleculeSynonym) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeSynonymBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *MoleculeSynonym) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeSynonymAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *MoleculeSynonym) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeSynonymBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *MoleculeSynonym) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range moleculeSynonymAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddMoleculeSynonymHook registers your hook function for all future operations. 
+func AddMoleculeSynonymHook(hookPoint boil.HookPoint, moleculeSynonymHook MoleculeSynonymHook) { + switch hookPoint { + case boil.AfterSelectHook: + moleculeSynonymAfterSelectHooks = append(moleculeSynonymAfterSelectHooks, moleculeSynonymHook) + case boil.BeforeInsertHook: + moleculeSynonymBeforeInsertHooks = append(moleculeSynonymBeforeInsertHooks, moleculeSynonymHook) + case boil.AfterInsertHook: + moleculeSynonymAfterInsertHooks = append(moleculeSynonymAfterInsertHooks, moleculeSynonymHook) + case boil.BeforeUpdateHook: + moleculeSynonymBeforeUpdateHooks = append(moleculeSynonymBeforeUpdateHooks, moleculeSynonymHook) + case boil.AfterUpdateHook: + moleculeSynonymAfterUpdateHooks = append(moleculeSynonymAfterUpdateHooks, moleculeSynonymHook) + case boil.BeforeDeleteHook: + moleculeSynonymBeforeDeleteHooks = append(moleculeSynonymBeforeDeleteHooks, moleculeSynonymHook) + case boil.AfterDeleteHook: + moleculeSynonymAfterDeleteHooks = append(moleculeSynonymAfterDeleteHooks, moleculeSynonymHook) + case boil.BeforeUpsertHook: + moleculeSynonymBeforeUpsertHooks = append(moleculeSynonymBeforeUpsertHooks, moleculeSynonymHook) + case boil.AfterUpsertHook: + moleculeSynonymAfterUpsertHooks = append(moleculeSynonymAfterUpsertHooks, moleculeSynonymHook) + } +} + +// One returns a single moleculeSynonym record from the query. +func (q moleculeSynonymQuery) One(ctx context.Context, exec boil.ContextExecutor) (*MoleculeSynonym, error) { + o := &MoleculeSynonym{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for molecule_synonyms") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all MoleculeSynonym records from the query. 
+func (q moleculeSynonymQuery) All(ctx context.Context, exec boil.ContextExecutor) (MoleculeSynonymSlice, error) { + var o []*MoleculeSynonym + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to MoleculeSynonym slice") + } + + if len(moleculeSynonymAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all MoleculeSynonym records in the query. +func (q moleculeSynonymQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count molecule_synonyms rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q moleculeSynonymQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if molecule_synonyms exists") + } + + return count > 0, nil +} + +// ResStem pointed to by the foreign key. +func (o *MoleculeSynonym) ResStem(mods ...qm.QueryMod) researchStemQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"res_stem_id\" = ?", o.ResStemID), + } + + queryMods = append(queryMods, mods...) + + return ResearchStems(queryMods...) +} + +// MolregnoMoleculeDictionary pointed to by the foreign key. +func (o *MoleculeSynonym) MolregnoMoleculeDictionary(mods ...qm.QueryMod) moleculeDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"molregno\" = ?", o.Molregno), + } + + queryMods = append(queryMods, mods...) 
+ + return MoleculeDictionaries(queryMods...) +} + +// LoadResStem allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (moleculeSynonymL) LoadResStem(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeSynonym interface{}, mods queries.Applicator) error { + var slice []*MoleculeSynonym + var object *MoleculeSynonym + + if singular { + object = maybeMoleculeSynonym.(*MoleculeSynonym) + } else { + slice = *maybeMoleculeSynonym.(*[]*MoleculeSynonym) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &moleculeSynonymR{} + } + if !queries.IsNil(object.ResStemID) { + args = append(args, object.ResStemID) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &moleculeSynonymR{} + } + + for _, a := range args { + if queries.Equal(a, obj.ResStemID) { + continue Outer + } + } + + if !queries.IsNil(obj.ResStemID) { + args = append(args, obj.ResStemID) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`research_stem`), + qm.WhereIn(`research_stem.res_stem_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ResearchStem") + } + + var resultSlice []*ResearchStem + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice ResearchStem") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for research_stem") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for research_stem") + } + + if len(moleculeSynonymAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if 
len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.ResStem = foreign + if foreign.R == nil { + foreign.R = &researchStemR{} + } + foreign.R.ResStemMoleculeSynonyms = append(foreign.R.ResStemMoleculeSynonyms, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.ResStemID, foreign.ResStemID) { + local.R.ResStem = foreign + if foreign.R == nil { + foreign.R = &researchStemR{} + } + foreign.R.ResStemMoleculeSynonyms = append(foreign.R.ResStemMoleculeSynonyms, local) + break + } + } + } + + return nil +} + +// LoadMolregnoMoleculeDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (moleculeSynonymL) LoadMolregnoMoleculeDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeMoleculeSynonym interface{}, mods queries.Applicator) error { + var slice []*MoleculeSynonym + var object *MoleculeSynonym + + if singular { + object = maybeMoleculeSynonym.(*MoleculeSynonym) + } else { + slice = *maybeMoleculeSynonym.(*[]*MoleculeSynonym) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &moleculeSynonymR{} + } + args = append(args, object.Molregno) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &moleculeSynonymR{} + } + + for _, a := range args { + if a == obj.Molregno { + continue Outer + } + } + + args = append(args, obj.Molregno) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`molecule_dictionary`), + qm.WhereIn(`molecule_dictionary.molregno in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load MoleculeDictionary") + } + + var resultSlice []*MoleculeDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + 
return errors.Wrap(err, "failed to bind eager loaded slice MoleculeDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for molecule_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for molecule_dictionary") + } + + if len(moleculeSynonymAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.MolregnoMoleculeDictionary = foreign + if foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.MolregnoMoleculeSynonyms = append(foreign.R.MolregnoMoleculeSynonyms, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Molregno == foreign.Molregno { + local.R.MolregnoMoleculeDictionary = foreign + if foreign.R == nil { + foreign.R = &moleculeDictionaryR{} + } + foreign.R.MolregnoMoleculeSynonyms = append(foreign.R.MolregnoMoleculeSynonyms, local) + break + } + } + } + + return nil +} + +// SetResStem of the moleculeSynonym to the related item. +// Sets o.R.ResStem to related. +// Adds o to related.R.ResStemMoleculeSynonyms. 
+func (o *MoleculeSynonym) SetResStem(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ResearchStem) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_synonyms\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"res_stem_id"}), + strmangle.WhereClause("\"", "\"", 0, moleculeSynonymPrimaryKeyColumns), + ) + values := []interface{}{related.ResStemID, o.MolsynID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.ResStemID, related.ResStemID) + if o.R == nil { + o.R = &moleculeSynonymR{ + ResStem: related, + } + } else { + o.R.ResStem = related + } + + if related.R == nil { + related.R = &researchStemR{ + ResStemMoleculeSynonyms: MoleculeSynonymSlice{o}, + } + } else { + related.R.ResStemMoleculeSynonyms = append(related.R.ResStemMoleculeSynonyms, o) + } + + return nil +} + +// RemoveResStem relationship. +// Sets o.R.ResStem to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *MoleculeSynonym) RemoveResStem(ctx context.Context, exec boil.ContextExecutor, related *ResearchStem) error { + var err error + + queries.SetScanner(&o.ResStemID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("res_stem_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.ResStem = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.ResStemMoleculeSynonyms { + if queries.Equal(o.ResStemID, ri.ResStemID) { + continue + } + + ln := len(related.R.ResStemMoleculeSynonyms) + if ln > 1 && i < ln-1 { + related.R.ResStemMoleculeSynonyms[i] = related.R.ResStemMoleculeSynonyms[ln-1] + } + related.R.ResStemMoleculeSynonyms = related.R.ResStemMoleculeSynonyms[:ln-1] + break + } + return nil +} + +// SetMolregnoMoleculeDictionary of the moleculeSynonym to the related item. +// Sets o.R.MolregnoMoleculeDictionary to related. +// Adds o to related.R.MolregnoMoleculeSynonyms. +func (o *MoleculeSynonym) SetMolregnoMoleculeDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *MoleculeDictionary) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_synonyms\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"molregno"}), + strmangle.WhereClause("\"", "\"", 0, moleculeSynonymPrimaryKeyColumns), + ) + values := []interface{}{related.Molregno, o.MolsynID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.Molregno = related.Molregno + if o.R == nil { + o.R = &moleculeSynonymR{ + MolregnoMoleculeDictionary: related, + } + } else 
{ + o.R.MolregnoMoleculeDictionary = related + } + + if related.R == nil { + related.R = &moleculeDictionaryR{ + MolregnoMoleculeSynonyms: MoleculeSynonymSlice{o}, + } + } else { + related.R.MolregnoMoleculeSynonyms = append(related.R.MolregnoMoleculeSynonyms, o) + } + + return nil +} + +// MoleculeSynonyms retrieves all the records using an executor. +func MoleculeSynonyms(mods ...qm.QueryMod) moleculeSynonymQuery { + mods = append(mods, qm.From("\"molecule_synonyms\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"molecule_synonyms\".*"}) + } + + return moleculeSynonymQuery{q} +} + +// FindMoleculeSynonym retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindMoleculeSynonym(ctx context.Context, exec boil.ContextExecutor, molsynID int64, selectCols ...string) (*MoleculeSynonym, error) { + moleculeSynonymObj := &MoleculeSynonym{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"molecule_synonyms\" where \"molsyn_id\"=?", sel, + ) + + q := queries.Raw(query, molsynID) + + err := q.Bind(ctx, exec, moleculeSynonymObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from molecule_synonyms") + } + + if err = moleculeSynonymObj.doAfterSelectHooks(ctx, exec); err != nil { + return moleculeSynonymObj, err + } + + return moleculeSynonymObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *MoleculeSynonym) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no molecule_synonyms provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(moleculeSynonymColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + moleculeSynonymInsertCacheMut.RLock() + cache, cached := moleculeSynonymInsertCache[key] + moleculeSynonymInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + moleculeSynonymAllColumns, + moleculeSynonymColumnsWithDefault, + moleculeSynonymColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(moleculeSynonymType, moleculeSynonymMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(moleculeSynonymType, moleculeSynonymMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"molecule_synonyms\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"molecule_synonyms\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) 
+ } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into molecule_synonyms") + } + + if !cached { + moleculeSynonymInsertCacheMut.Lock() + moleculeSynonymInsertCache[key] = cache + moleculeSynonymInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the MoleculeSynonym. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *MoleculeSynonym) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + moleculeSynonymUpdateCacheMut.RLock() + cache, cached := moleculeSynonymUpdateCache[key] + moleculeSynonymUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + moleculeSynonymAllColumns, + moleculeSynonymPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update molecule_synonyms, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"molecule_synonyms\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, moleculeSynonymPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(moleculeSynonymType, moleculeSynonymMapping, append(wl, moleculeSynonymPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result 
+ result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update molecule_synonyms row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for molecule_synonyms") + } + + if !cached { + moleculeSynonymUpdateCacheMut.Lock() + moleculeSynonymUpdateCache[key] = cache + moleculeSynonymUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q moleculeSynonymQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for molecule_synonyms") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for molecule_synonyms") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o MoleculeSynonymSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeSynonymPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"molecule_synonyms\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeSynonymPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in moleculeSynonym slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all moleculeSynonym") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *MoleculeSynonym) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no molecule_synonyms provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(moleculeSynonymColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + 
strmangle.PutBuffer(buf) + + moleculeSynonymUpsertCacheMut.RLock() + cache, cached := moleculeSynonymUpsertCache[key] + moleculeSynonymUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + moleculeSynonymAllColumns, + moleculeSynonymColumnsWithDefault, + moleculeSynonymColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + moleculeSynonymAllColumns, + moleculeSynonymPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert molecule_synonyms, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(moleculeSynonymPrimaryKeyColumns)) + copy(conflict, moleculeSynonymPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"molecule_synonyms\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(moleculeSynonymType, moleculeSynonymMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(moleculeSynonymType, moleculeSynonymMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert molecule_synonyms") + } + + if !cached { + moleculeSynonymUpsertCacheMut.Lock() + moleculeSynonymUpsertCache[key] = cache + moleculeSynonymUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single MoleculeSynonym record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *MoleculeSynonym) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no MoleculeSynonym provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), moleculeSynonymPrimaryKeyMapping) + sql := "DELETE FROM \"molecule_synonyms\" WHERE \"molsyn_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from molecule_synonyms") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for molecule_synonyms") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+// NOTE(review): SQLBoiler 4.11.0 generated code — the file header says DO NOT
+// EDIT; prefer regenerating the models over hand-patching this file.
+func (q moleculeSynonymQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
+	if q.Query == nil {
+		return 0, errors.New("models: no moleculeSynonymQuery provided for delete all")
+	}
+
+	queries.SetDelete(q.Query)
+
+	result, err := q.Query.ExecContext(ctx, exec)
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to delete all from molecule_synonyms")
+	}
+
+	rowsAff, err := result.RowsAffected()
+	if err != nil {
+		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for molecule_synonyms")
+	}
+
+	return rowsAff, nil
+}
+
+// DeleteAll deletes all rows in the slice, using an executor.
+func (o MoleculeSynonymSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
+	if len(o) == 0 {
+		return 0, nil
+	}
+
+	// Run every before-delete hook up front; any hook error aborts the batch.
+	if len(moleculeSynonymBeforeDeleteHooks) != 0 {
+		for _, obj := range o {
+			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
+				return 0, err
+			}
+		}
+	}
+
+	var args []interface{}
+	for _, obj := range o {
+		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeSynonymPrimaryKeyMapping)
+		args = append(args, pkeyArgs...)
+	}
+
+	sql := "DELETE FROM \"molecule_synonyms\" WHERE " +
+		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeSynonymPrimaryKeyColumns, len(o))
+
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, sql)
+		fmt.Fprintln(writer, args)
+	}
+	result, err := exec.ExecContext(ctx, sql, args...)
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to delete all from moleculeSynonym slice")
+	}
+
+	rowsAff, err := result.RowsAffected()
+	if err != nil {
+		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for molecule_synonyms")
+	}
+
+	if len(moleculeSynonymAfterDeleteHooks) != 0 {
+		for _, obj := range o {
+			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
+				return 0, err
+			}
+		}
+	}
+
+	return rowsAff, nil
+}
+
+// Reload refetches the object from the database
+// using the primary keys with an executor.
+func (o *MoleculeSynonym) Reload(ctx context.Context, exec boil.ContextExecutor) error {
+	ret, err := FindMoleculeSynonym(ctx, exec, o.MolsynID)
+	if err != nil {
+		return err
+	}
+
+	*o = *ret
+	return nil
+}
+
+// ReloadAll refetches every row with matching primary key column values
+// and overwrites the original object slice with the newly updated slice.
+func (o *MoleculeSynonymSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
+	if o == nil || len(*o) == 0 {
+		return nil
+	}
+
+	slice := MoleculeSynonymSlice{}
+	var args []interface{}
+	for _, obj := range *o {
+		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), moleculeSynonymPrimaryKeyMapping)
+		args = append(args, pkeyArgs...)
+	}
+
+	sql := "SELECT \"molecule_synonyms\".* FROM \"molecule_synonyms\" WHERE " +
+		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, moleculeSynonymPrimaryKeyColumns, len(*o))
+
+	q := queries.Raw(sql, args...)
+
+	// Rows deleted since the slice was loaded simply don't come back, so the
+	// refreshed slice may be shorter than the original.
+	err := q.Bind(ctx, exec, &slice)
+	if err != nil {
+		return errors.Wrap(err, "models: unable to reload all in MoleculeSynonymSlice")
+	}
+
+	*o = slice
+
+	return nil
+}
+
+// MoleculeSynonymExists checks if the MoleculeSynonym row exists.
+func MoleculeSynonymExists(ctx context.Context, exec boil.ContextExecutor, molsynID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"molecule_synonyms\" where \"molsyn_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, molsynID) + } + row := exec.QueryRowContext(ctx, sql, molsynID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if molecule_synonyms exists") + } + + return exists, nil +} diff --git a/models/organism_class.go b/models/organism_class.go new file mode 100644 index 0000000..7b1249c --- /dev/null +++ b/models/organism_class.go @@ -0,0 +1,911 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// OrganismClass is an object representing the database table. 
+type OrganismClass struct { + OcID int64 `boil:"oc_id" json:"oc_id" toml:"oc_id" yaml:"oc_id"` + TaxID null.Int64 `boil:"tax_id" json:"tax_id,omitempty" toml:"tax_id" yaml:"tax_id,omitempty"` + L1 null.String `boil:"l1" json:"l1,omitempty" toml:"l1" yaml:"l1,omitempty"` + L2 null.String `boil:"l2" json:"l2,omitempty" toml:"l2" yaml:"l2,omitempty"` + L3 null.String `boil:"l3" json:"l3,omitempty" toml:"l3" yaml:"l3,omitempty"` + + R *organismClassR `boil:"-" json:"-" toml:"-" yaml:"-"` + L organismClassL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var OrganismClassColumns = struct { + OcID string + TaxID string + L1 string + L2 string + L3 string +}{ + OcID: "oc_id", + TaxID: "tax_id", + L1: "l1", + L2: "l2", + L3: "l3", +} + +var OrganismClassTableColumns = struct { + OcID string + TaxID string + L1 string + L2 string + L3 string +}{ + OcID: "organism_class.oc_id", + TaxID: "organism_class.tax_id", + L1: "organism_class.l1", + L2: "organism_class.l2", + L3: "organism_class.l3", +} + +// Generated where + +var OrganismClassWhere = struct { + OcID whereHelperint64 + TaxID whereHelpernull_Int64 + L1 whereHelpernull_String + L2 whereHelpernull_String + L3 whereHelpernull_String +}{ + OcID: whereHelperint64{field: "\"organism_class\".\"oc_id\""}, + TaxID: whereHelpernull_Int64{field: "\"organism_class\".\"tax_id\""}, + L1: whereHelpernull_String{field: "\"organism_class\".\"l1\""}, + L2: whereHelpernull_String{field: "\"organism_class\".\"l2\""}, + L3: whereHelpernull_String{field: "\"organism_class\".\"l3\""}, +} + +// OrganismClassRels is where relationship names are stored. +var OrganismClassRels = struct { +}{} + +// organismClassR is where relationships are stored. +type organismClassR struct { +} + +// NewStruct creates a new relationship struct +func (*organismClassR) NewStruct() *organismClassR { + return &organismClassR{} +} + +// organismClassL is where Load methods for each relationship are stored. 
+type organismClassL struct{}
+
+var (
+	organismClassAllColumns            = []string{"oc_id", "tax_id", "l1", "l2", "l3"}
+	organismClassColumnsWithoutDefault = []string{"oc_id"}
+	organismClassColumnsWithDefault    = []string{"tax_id", "l1", "l2", "l3"}
+	organismClassPrimaryKeyColumns     = []string{"oc_id"}
+	organismClassGeneratedColumns      = []string{}
+)
+
+type (
+	// OrganismClassSlice is an alias for a slice of pointers to OrganismClass.
+	// This should almost always be used instead of []OrganismClass.
+	OrganismClassSlice []*OrganismClass
+	// OrganismClassHook is the signature for custom OrganismClass hook methods
+	OrganismClassHook func(context.Context, boil.ContextExecutor, *OrganismClass) error
+
+	organismClassQuery struct {
+		*queries.Query
+	}
+)
+
+// Cache for insert, update and upsert
+var (
+	organismClassType                 = reflect.TypeOf(&OrganismClass{})
+	organismClassMapping              = queries.MakeStructMapping(organismClassType)
+	organismClassPrimaryKeyMapping, _ = queries.BindMapping(organismClassType, organismClassMapping, organismClassPrimaryKeyColumns)
+	organismClassInsertCacheMut       sync.RWMutex
+	organismClassInsertCache          = make(map[string]insertCache)
+	organismClassUpdateCacheMut       sync.RWMutex
+	organismClassUpdateCache          = make(map[string]updateCache)
+	organismClassUpsertCacheMut       sync.RWMutex
+	organismClassUpsertCache          = make(map[string]insertCache)
+)
+
+var (
+	// Force time package dependency for automated UpdatedAt/CreatedAt.
+	_ = time.Second
+	// Force qmhelper dependency for where clause generation (which doesn't
+	// always happen)
+	_ = qmhelper.Where
+)
+
+var organismClassAfterSelectHooks []OrganismClassHook
+
+var organismClassBeforeInsertHooks []OrganismClassHook
+var organismClassAfterInsertHooks []OrganismClassHook
+
+var organismClassBeforeUpdateHooks []OrganismClassHook
+var organismClassAfterUpdateHooks []OrganismClassHook
+
+var organismClassBeforeDeleteHooks []OrganismClassHook
+var organismClassAfterDeleteHooks []OrganismClassHook
+
+var organismClassBeforeUpsertHooks []OrganismClassHook
+var organismClassAfterUpsertHooks []OrganismClassHook
+
+// doAfterSelectHooks executes all "after Select" hooks.
+func (o *OrganismClass) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range organismClassAfterSelectHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doBeforeInsertHooks executes all "before insert" hooks.
+func (o *OrganismClass) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range organismClassBeforeInsertHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doAfterInsertHooks executes all "after Insert" hooks.
+func (o *OrganismClass) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range organismClassAfterInsertHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doBeforeUpdateHooks executes all "before Update" hooks.
+func (o *OrganismClass) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range organismClassBeforeUpdateHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doAfterUpdateHooks executes all "after Update" hooks.
+func (o *OrganismClass) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range organismClassAfterUpdateHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doBeforeDeleteHooks executes all "before Delete" hooks.
+func (o *OrganismClass) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range organismClassBeforeDeleteHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doAfterDeleteHooks executes all "after Delete" hooks.
+func (o *OrganismClass) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range organismClassAfterDeleteHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doBeforeUpsertHooks executes all "before Upsert" hooks.
+func (o *OrganismClass) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range organismClassBeforeUpsertHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doAfterUpsertHooks executes all "after Upsert" hooks.
+func (o *OrganismClass) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range organismClassAfterUpsertHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// AddOrganismClassHook registers your hook function for all future operations.
+// NOTE(review): the hook slices are appended to without any locking — register
+// hooks during program init, before concurrent use of the models.
+func AddOrganismClassHook(hookPoint boil.HookPoint, organismClassHook OrganismClassHook) {
+	switch hookPoint {
+	case boil.AfterSelectHook:
+		organismClassAfterSelectHooks = append(organismClassAfterSelectHooks, organismClassHook)
+	case boil.BeforeInsertHook:
+		organismClassBeforeInsertHooks = append(organismClassBeforeInsertHooks, organismClassHook)
+	case boil.AfterInsertHook:
+		organismClassAfterInsertHooks = append(organismClassAfterInsertHooks, organismClassHook)
+	case boil.BeforeUpdateHook:
+		organismClassBeforeUpdateHooks = append(organismClassBeforeUpdateHooks, organismClassHook)
+	case boil.AfterUpdateHook:
+		organismClassAfterUpdateHooks = append(organismClassAfterUpdateHooks, organismClassHook)
+	case boil.BeforeDeleteHook:
+		organismClassBeforeDeleteHooks = append(organismClassBeforeDeleteHooks, organismClassHook)
+	case boil.AfterDeleteHook:
+		organismClassAfterDeleteHooks = append(organismClassAfterDeleteHooks, organismClassHook)
+	case boil.BeforeUpsertHook:
+		organismClassBeforeUpsertHooks = append(organismClassBeforeUpsertHooks, organismClassHook)
+	case boil.AfterUpsertHook:
+		organismClassAfterUpsertHooks = append(organismClassAfterUpsertHooks, organismClassHook)
+	}
+}
+
+// One returns a single organismClass record from the query.
+func (q organismClassQuery) One(ctx context.Context, exec boil.ContextExecutor) (*OrganismClass, error) { + o := &OrganismClass{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for organism_class") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all OrganismClass records from the query. +func (q organismClassQuery) All(ctx context.Context, exec boil.ContextExecutor) (OrganismClassSlice, error) { + var o []*OrganismClass + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to OrganismClass slice") + } + + if len(organismClassAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all OrganismClass records in the query. +func (q organismClassQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count organism_class rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q organismClassQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if organism_class exists") + } + + return count > 0, nil +} + +// OrganismClasses retrieves all the records using an executor. 
+func OrganismClasses(mods ...qm.QueryMod) organismClassQuery { + mods = append(mods, qm.From("\"organism_class\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"organism_class\".*"}) + } + + return organismClassQuery{q} +} + +// FindOrganismClass retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindOrganismClass(ctx context.Context, exec boil.ContextExecutor, ocID int64, selectCols ...string) (*OrganismClass, error) { + organismClassObj := &OrganismClass{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"organism_class\" where \"oc_id\"=?", sel, + ) + + q := queries.Raw(query, ocID) + + err := q.Bind(ctx, exec, organismClassObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from organism_class") + } + + if err = organismClassObj.doAfterSelectHooks(ctx, exec); err != nil { + return organismClassObj, err + } + + return organismClassObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *OrganismClass) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
+	if o == nil {
+		return errors.New("models: no organism_class provided for insertion")
+	}
+
+	var err error
+
+	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
+		return err
+	}
+
+	nzDefaults := queries.NonZeroDefaultSet(organismClassColumnsWithDefault, o)
+
+	key := makeCacheKey(columns, nzDefaults)
+	organismClassInsertCacheMut.RLock()
+	cache, cached := organismClassInsertCache[key]
+	organismClassInsertCacheMut.RUnlock()
+
+	if !cached {
+		wl, returnColumns := columns.InsertColumnSet(
+			organismClassAllColumns,
+			organismClassColumnsWithDefault,
+			organismClassColumnsWithoutDefault,
+			nzDefaults,
+		)
+
+		cache.valueMapping, err = queries.BindMapping(organismClassType, organismClassMapping, wl)
+		if err != nil {
+			return err
+		}
+		cache.retMapping, err = queries.BindMapping(organismClassType, organismClassMapping, returnColumns)
+		if err != nil {
+			return err
+		}
+		if len(wl) != 0 {
+			cache.query = fmt.Sprintf("INSERT INTO \"organism_class\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
+		} else {
+			cache.query = "INSERT INTO \"organism_class\" %sDEFAULT VALUES%s"
+		}
+
+		var queryOutput, queryReturning string
+
+		if len(cache.retMapping) != 0 {
+			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
+		}
+
+		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) // queryOutput stays empty here; only RETURNING is spliced in
+	}
+
+	value := reflect.Indirect(reflect.ValueOf(o))
+	vals := queries.ValuesFromMapping(value, cache.valueMapping)
+
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, cache.query)
+		fmt.Fprintln(writer, vals)
+	}
+
+	if len(cache.retMapping) != 0 {
+		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
+	} else {
+		_, err = exec.ExecContext(ctx, cache.query, vals...)
+	}
+
+	if err != nil {
+		return errors.Wrap(err, "models: unable to insert into organism_class")
+	}
+
+	if !cached {
+		organismClassInsertCacheMut.Lock()
+		organismClassInsertCache[key] = cache
+		organismClassInsertCacheMut.Unlock()
+	}
+
+	return o.doAfterInsertHooks(ctx, exec)
+}
+
+// Update uses an executor to update the OrganismClass.
+// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
+// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
+func (o *OrganismClass) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
+	var err error
+	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
+		return 0, err
+	}
+	key := makeCacheKey(columns, nil)
+	organismClassUpdateCacheMut.RLock()
+	cache, cached := organismClassUpdateCache[key]
+	organismClassUpdateCacheMut.RUnlock()
+
+	if !cached {
+		wl := columns.UpdateColumnSet(
+			organismClassAllColumns,
+			organismClassPrimaryKeyColumns,
+		)
+
+		if !columns.IsWhitelist() {
+			wl = strmangle.SetComplement(wl, []string{"created_at"})
+		}
+		if len(wl) == 0 {
+			return 0, errors.New("models: unable to update organism_class, could not build whitelist")
+		}
+
+		cache.query = fmt.Sprintf("UPDATE \"organism_class\" SET %s WHERE %s",
+			strmangle.SetParamNames("\"", "\"", 0, wl),
+			strmangle.WhereClause("\"", "\"", 0, organismClassPrimaryKeyColumns),
+		)
+		cache.valueMapping, err = queries.BindMapping(organismClassType, organismClassMapping, append(wl, organismClassPrimaryKeyColumns...))
+		if err != nil {
+			return 0, err
+		}
+	}
+
+	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)
+
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, cache.query)
+		fmt.Fprintln(writer, values)
+	}
+	var result sql.Result
+	result, err = exec.ExecContext(ctx, cache.query, values...)
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to update organism_class row")
+	}
+
+	rowsAff, err := result.RowsAffected()
+	if err != nil {
+		return 0, errors.Wrap(err, "models: failed to get rows affected by update for organism_class")
+	}
+
+	if !cached {
+		organismClassUpdateCacheMut.Lock()
+		organismClassUpdateCache[key] = cache
+		organismClassUpdateCacheMut.Unlock()
+	}
+
+	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
+}
+
+// UpdateAll updates all rows with the specified column values.
+func (q organismClassQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
+	queries.SetUpdate(q.Query, cols)
+
+	result, err := q.Query.ExecContext(ctx, exec)
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to update all for organism_class")
+	}
+
+	rowsAff, err := result.RowsAffected()
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for organism_class")
+	}
+
+	return rowsAff, nil
+}
+
+// UpdateAll updates all rows with the specified column values, using an executor.
+func (o OrganismClassSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
+	ln := int64(len(o))
+	if ln == 0 {
+		return 0, nil
+	}
+
+	if len(cols) == 0 {
+		return 0, errors.New("models: update all requires at least one column argument")
+	}
+
+	colNames := make([]string, len(cols))
+	args := make([]interface{}, len(cols))
+
+	i := 0
+	for name, value := range cols {
+		colNames[i] = name
+		args[i] = value
+		i++
+	}
+
+	// Append all of the primary key values for each column
+	for _, obj := range o {
+		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), organismClassPrimaryKeyMapping)
+		args = append(args, pkeyArgs...)
+	}
+
+	sql := fmt.Sprintf("UPDATE \"organism_class\" SET %s WHERE %s",
+		strmangle.SetParamNames("\"", "\"", 0, colNames),
+		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, organismClassPrimaryKeyColumns, len(o)))
+
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, sql)
+		fmt.Fprintln(writer, args...)
+	}
+	result, err := exec.ExecContext(ctx, sql, args...)
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to update all in organismClass slice")
+	}
+
+	rowsAff, err := result.RowsAffected()
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all organismClass")
+	}
+	return rowsAff, nil
+}
+
+// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
+// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
+func (o *OrganismClass) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
+	if o == nil {
+		return errors.New("models: no organism_class provided for upsert")
+	}
+
+	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
+		return err
+	}
+
+	nzDefaults := queries.NonZeroDefaultSet(organismClassColumnsWithDefault, o)
+
+	// Build cache key in-line uglily - mysql vs psql problems
+	buf := strmangle.GetBuffer()
+	if updateOnConflict {
+		buf.WriteByte('t')
+	} else {
+		buf.WriteByte('f')
+	}
+	buf.WriteByte('.')
+	for _, c := range conflictColumns {
+		buf.WriteString(c)
+	}
+	buf.WriteByte('.')
+	buf.WriteString(strconv.Itoa(updateColumns.Kind))
+	for _, c := range updateColumns.Cols {
+		buf.WriteString(c)
+	}
+	buf.WriteByte('.')
+	buf.WriteString(strconv.Itoa(insertColumns.Kind))
+	for _, c := range insertColumns.Cols {
+		buf.WriteString(c)
+	}
+	buf.WriteByte('.')
+	for _, c := range nzDefaults {
+		buf.WriteString(c)
+	}
+	key := buf.String()
+	strmangle.PutBuffer(buf)
+
+	organismClassUpsertCacheMut.RLock()
+	cache, cached := organismClassUpsertCache[key]
+	organismClassUpsertCacheMut.RUnlock()
+
+	var err error
+
+	if !cached {
+		insert, ret := insertColumns.InsertColumnSet(
+			organismClassAllColumns,
+			organismClassColumnsWithDefault,
+			organismClassColumnsWithoutDefault,
+			nzDefaults,
+		)
+		update := updateColumns.UpdateColumnSet(
+			organismClassAllColumns,
+			organismClassPrimaryKeyColumns,
+		)
+
+		if updateOnConflict && len(update) == 0 {
+			return errors.New("models: unable to upsert organism_class, could not build update column list")
+		}
+
+		conflict := conflictColumns
+		if len(conflict) == 0 {
+			conflict = make([]string, len(organismClassPrimaryKeyColumns))
+			copy(conflict, organismClassPrimaryKeyColumns)
+		}
+		cache.query = buildUpsertQuerySQLite(dialect, "\"organism_class\"", updateOnConflict, ret, update, conflict, insert)
+
+		cache.valueMapping, err = queries.BindMapping(organismClassType, organismClassMapping, insert)
+		if err != nil {
+			return err
+		}
+		if len(ret) != 0 {
+			cache.retMapping, err = queries.BindMapping(organismClassType, organismClassMapping, ret)
+			if err != nil {
+				return err
+			}
+		}
+	}
+
+	value := reflect.Indirect(reflect.ValueOf(o))
+	vals := queries.ValuesFromMapping(value, cache.valueMapping)
+	var returns []interface{}
+	if len(cache.retMapping) != 0 {
+		returns = queries.PtrsFromMapping(value, cache.retMapping)
+	}
+
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, cache.query)
+		fmt.Fprintln(writer, vals)
+	}
+	if len(cache.retMapping) != 0 {
+		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
+		if errors.Is(err, sql.ErrNoRows) {
+			err = nil // no row comes back when the conflict resolved without returning data; treat as success
+		}
+	} else {
+		_, err = exec.ExecContext(ctx, cache.query, vals...)
+	}
+	if err != nil {
+		return errors.Wrap(err, "models: unable to upsert organism_class")
+	}
+
+	if !cached {
+		organismClassUpsertCacheMut.Lock()
+		organismClassUpsertCache[key] = cache
+		organismClassUpsertCacheMut.Unlock()
+	}
+
+	return o.doAfterUpsertHooks(ctx, exec)
+}
+
+// Delete deletes a single OrganismClass record with an executor.
+// Delete will match against the primary key column to find the record to delete.
+func (o *OrganismClass) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
+	if o == nil {
+		return 0, errors.New("models: no OrganismClass provided for delete")
+	}
+
+	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
+		return 0, err
+	}
+
+	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), organismClassPrimaryKeyMapping)
+	sql := "DELETE FROM \"organism_class\" WHERE \"oc_id\"=?"
+
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, sql)
+		fmt.Fprintln(writer, args...)
+	}
+	result, err := exec.ExecContext(ctx, sql, args...)
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to delete from organism_class")
+	}
+
+	rowsAff, err := result.RowsAffected()
+	if err != nil {
+		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for organism_class")
+	}
+
+	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
+		return 0, err
+	}
+
+	return rowsAff, nil
+}
+
+// DeleteAll deletes all matching rows.
+func (q organismClassQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
+	if q.Query == nil {
+		return 0, errors.New("models: no organismClassQuery provided for delete all")
+	}
+
+	queries.SetDelete(q.Query)
+
+	result, err := q.Query.ExecContext(ctx, exec)
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to delete all from organism_class")
+	}
+
+	rowsAff, err := result.RowsAffected()
+	if err != nil {
+		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for organism_class")
+	}
+
+	return rowsAff, nil
+}
+
+// DeleteAll deletes all rows in the slice, using an executor.
+func (o OrganismClassSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
+	if len(o) == 0 {
+		return 0, nil
+	}
+
+	if len(organismClassBeforeDeleteHooks) != 0 {
+		for _, obj := range o {
+			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
+				return 0, err
+			}
+		}
+	}
+
+	var args []interface{}
+	for _, obj := range o {
+		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), organismClassPrimaryKeyMapping)
+		args = append(args, pkeyArgs...)
+	}
+
+	sql := "DELETE FROM \"organism_class\" WHERE " +
+		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, organismClassPrimaryKeyColumns, len(o))
+
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, sql)
+		fmt.Fprintln(writer, args)
+	}
+	result, err := exec.ExecContext(ctx, sql, args...)
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to delete all from organismClass slice")
+	}
+
+	rowsAff, err := result.RowsAffected()
+	if err != nil {
+		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for organism_class")
+	}
+
+	if len(organismClassAfterDeleteHooks) != 0 {
+		for _, obj := range o {
+			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
+				return 0, err
+			}
+		}
+	}
+
+	return rowsAff, nil
+}
+
+// Reload refetches the object from the database
+// using the primary keys with an executor.
+func (o *OrganismClass) Reload(ctx context.Context, exec boil.ContextExecutor) error {
+	ret, err := FindOrganismClass(ctx, exec, o.OcID)
+	if err != nil {
+		return err
+	}
+
+	*o = *ret
+	return nil
+}
+
+// ReloadAll refetches every row with matching primary key column values
+// and overwrites the original object slice with the newly updated slice.
+func (o *OrganismClassSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
+	if o == nil || len(*o) == 0 {
+		return nil
+	}
+
+	slice := OrganismClassSlice{}
+	var args []interface{}
+	for _, obj := range *o {
+		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), organismClassPrimaryKeyMapping)
+		args = append(args, pkeyArgs...)
+	}
+
+	sql := "SELECT \"organism_class\".* FROM \"organism_class\" WHERE " +
+		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, organismClassPrimaryKeyColumns, len(*o))
+
+	q := queries.Raw(sql, args...)
+
+	err := q.Bind(ctx, exec, &slice)
+	if err != nil {
+		return errors.Wrap(err, "models: unable to reload all in OrganismClassSlice")
+	}
+
+	*o = slice
+
+	return nil
+}
+
+// OrganismClassExists checks if the OrganismClass row exists.
+func OrganismClassExists(ctx context.Context, exec boil.ContextExecutor, ocID int64) (bool, error) {
+	var exists bool
+	sql := "select exists(select 1 from \"organism_class\" where \"oc_id\"=? limit 1)"
+
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, sql)
+		fmt.Fprintln(writer, ocID)
+	}
+	row := exec.QueryRowContext(ctx, sql, ocID)
+
+	err := row.Scan(&exists)
+	if err != nil {
+		return false, errors.Wrap(err, "models: unable to check if organism_class exists")
+	}
+
+	return exists, nil
+}
diff --git a/models/patent_use_codes.go b/models/patent_use_codes.go
new file mode 100644
index 0000000..3012aa7
--- /dev/null
+++ b/models/patent_use_codes.go
@@ -0,0 +1,1139 @@
+// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
+// This file is meant to be re-generated in place and/or deleted at any time.
+
+package models
+
+import (
+	"context"
+	"database/sql"
+	"fmt"
+	"reflect"
+	"strconv"
+	"strings"
+	"sync"
+	"time"
+
+	"github.com/friendsofgo/errors"
+	"github.com/volatiletech/sqlboiler/v4/boil"
+	"github.com/volatiletech/sqlboiler/v4/queries"
+	"github.com/volatiletech/sqlboiler/v4/queries/qm"
+	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
+	"github.com/volatiletech/strmangle"
+)
+
+// PatentUseCode is an object representing the database table.
+type PatentUseCode struct { + PatentUseCode string `boil:"patent_use_code" json:"patent_use_code" toml:"patent_use_code" yaml:"patent_use_code"` + Definition string `boil:"definition" json:"definition" toml:"definition" yaml:"definition"` + + R *patentUseCodeR `boil:"-" json:"-" toml:"-" yaml:"-"` + L patentUseCodeL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var PatentUseCodeColumns = struct { + PatentUseCode string + Definition string +}{ + PatentUseCode: "patent_use_code", + Definition: "definition", +} + +var PatentUseCodeTableColumns = struct { + PatentUseCode string + Definition string +}{ + PatentUseCode: "patent_use_codes.patent_use_code", + Definition: "patent_use_codes.definition", +} + +// Generated where + +var PatentUseCodeWhere = struct { + PatentUseCode whereHelperstring + Definition whereHelperstring +}{ + PatentUseCode: whereHelperstring{field: "\"patent_use_codes\".\"patent_use_code\""}, + Definition: whereHelperstring{field: "\"patent_use_codes\".\"definition\""}, +} + +// PatentUseCodeRels is where relationship names are stored. +var PatentUseCodeRels = struct { + ProductPatents string +}{ + ProductPatents: "ProductPatents", +} + +// patentUseCodeR is where relationships are stored. +type patentUseCodeR struct { + ProductPatents ProductPatentSlice `boil:"ProductPatents" json:"ProductPatents" toml:"ProductPatents" yaml:"ProductPatents"` +} + +// NewStruct creates a new relationship struct +func (*patentUseCodeR) NewStruct() *patentUseCodeR { + return &patentUseCodeR{} +} + +func (r *patentUseCodeR) GetProductPatents() ProductPatentSlice { + if r == nil { + return nil + } + return r.ProductPatents +} + +// patentUseCodeL is where Load methods for each relationship are stored. 
+type patentUseCodeL struct{} + +var ( + patentUseCodeAllColumns = []string{"patent_use_code", "definition"} + patentUseCodeColumnsWithoutDefault = []string{"patent_use_code", "definition"} + patentUseCodeColumnsWithDefault = []string{} + patentUseCodePrimaryKeyColumns = []string{"patent_use_code"} + patentUseCodeGeneratedColumns = []string{} +) + +type ( + // PatentUseCodeSlice is an alias for a slice of pointers to PatentUseCode. + // This should almost always be used instead of []PatentUseCode. + PatentUseCodeSlice []*PatentUseCode + // PatentUseCodeHook is the signature for custom PatentUseCode hook methods + PatentUseCodeHook func(context.Context, boil.ContextExecutor, *PatentUseCode) error + + patentUseCodeQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + patentUseCodeType = reflect.TypeOf(&PatentUseCode{}) + patentUseCodeMapping = queries.MakeStructMapping(patentUseCodeType) + patentUseCodePrimaryKeyMapping, _ = queries.BindMapping(patentUseCodeType, patentUseCodeMapping, patentUseCodePrimaryKeyColumns) + patentUseCodeInsertCacheMut sync.RWMutex + patentUseCodeInsertCache = make(map[string]insertCache) + patentUseCodeUpdateCacheMut sync.RWMutex + patentUseCodeUpdateCache = make(map[string]updateCache) + patentUseCodeUpsertCacheMut sync.RWMutex + patentUseCodeUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var patentUseCodeAfterSelectHooks []PatentUseCodeHook + +var patentUseCodeBeforeInsertHooks []PatentUseCodeHook +var patentUseCodeAfterInsertHooks []PatentUseCodeHook + +var patentUseCodeBeforeUpdateHooks []PatentUseCodeHook +var patentUseCodeAfterUpdateHooks []PatentUseCodeHook + +var patentUseCodeBeforeDeleteHooks []PatentUseCodeHook +var patentUseCodeAfterDeleteHooks []PatentUseCodeHook + +var patentUseCodeBeforeUpsertHooks []PatentUseCodeHook +var patentUseCodeAfterUpsertHooks []PatentUseCodeHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *PatentUseCode) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range patentUseCodeAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *PatentUseCode) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range patentUseCodeBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *PatentUseCode) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range patentUseCodeAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *PatentUseCode) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range patentUseCodeBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *PatentUseCode) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range patentUseCodeAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *PatentUseCode) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range patentUseCodeBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *PatentUseCode) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range patentUseCodeAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *PatentUseCode) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range patentUseCodeBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *PatentUseCode) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range patentUseCodeAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddPatentUseCodeHook registers your hook function for all future operations. +func AddPatentUseCodeHook(hookPoint boil.HookPoint, patentUseCodeHook PatentUseCodeHook) { + switch hookPoint { + case boil.AfterSelectHook: + patentUseCodeAfterSelectHooks = append(patentUseCodeAfterSelectHooks, patentUseCodeHook) + case boil.BeforeInsertHook: + patentUseCodeBeforeInsertHooks = append(patentUseCodeBeforeInsertHooks, patentUseCodeHook) + case boil.AfterInsertHook: + patentUseCodeAfterInsertHooks = append(patentUseCodeAfterInsertHooks, patentUseCodeHook) + case boil.BeforeUpdateHook: + patentUseCodeBeforeUpdateHooks = append(patentUseCodeBeforeUpdateHooks, patentUseCodeHook) + case boil.AfterUpdateHook: + patentUseCodeAfterUpdateHooks = append(patentUseCodeAfterUpdateHooks, patentUseCodeHook) + case boil.BeforeDeleteHook: + patentUseCodeBeforeDeleteHooks = append(patentUseCodeBeforeDeleteHooks, patentUseCodeHook) + case boil.AfterDeleteHook: + patentUseCodeAfterDeleteHooks = append(patentUseCodeAfterDeleteHooks, patentUseCodeHook) + case boil.BeforeUpsertHook: + patentUseCodeBeforeUpsertHooks = append(patentUseCodeBeforeUpsertHooks, patentUseCodeHook) + case boil.AfterUpsertHook: + patentUseCodeAfterUpsertHooks = append(patentUseCodeAfterUpsertHooks, patentUseCodeHook) + } +} + +// One returns a single patentUseCode record from the query. 
+func (q patentUseCodeQuery) One(ctx context.Context, exec boil.ContextExecutor) (*PatentUseCode, error) { + o := &PatentUseCode{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for patent_use_codes") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all PatentUseCode records from the query. +func (q patentUseCodeQuery) All(ctx context.Context, exec boil.ContextExecutor) (PatentUseCodeSlice, error) { + var o []*PatentUseCode + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to PatentUseCode slice") + } + + if len(patentUseCodeAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all PatentUseCode records in the query. +func (q patentUseCodeQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count patent_use_codes rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. 
+func (q patentUseCodeQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if patent_use_codes exists") + } + + return count > 0, nil +} + +// ProductPatents retrieves all the product_patent's ProductPatents with an executor. +func (o *PatentUseCode) ProductPatents(mods ...qm.QueryMod) productPatentQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"product_patents\".\"patent_use_code\"=?", o.PatentUseCode), + ) + + return ProductPatents(queryMods...) +} + +// LoadProductPatents allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (patentUseCodeL) LoadProductPatents(ctx context.Context, e boil.ContextExecutor, singular bool, maybePatentUseCode interface{}, mods queries.Applicator) error { + var slice []*PatentUseCode + var object *PatentUseCode + + if singular { + object = maybePatentUseCode.(*PatentUseCode) + } else { + slice = *maybePatentUseCode.(*[]*PatentUseCode) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &patentUseCodeR{} + } + args = append(args, object.PatentUseCode) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &patentUseCodeR{} + } + + for _, a := range args { + if queries.Equal(a, obj.PatentUseCode) { + continue Outer + } + } + + args = append(args, obj.PatentUseCode) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`product_patents`), + qm.WhereIn(`product_patents.patent_use_code in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if 
err != nil { + return errors.Wrap(err, "failed to eager load product_patents") + } + + var resultSlice []*ProductPatent + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice product_patents") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on product_patents") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for product_patents") + } + + if len(productPatentAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.ProductPatents = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &productPatentR{} + } + foreign.R.ProductPatentPatentUseCode = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.PatentUseCode, foreign.PatentUseCode) { + local.R.ProductPatents = append(local.R.ProductPatents, foreign) + if foreign.R == nil { + foreign.R = &productPatentR{} + } + foreign.R.ProductPatentPatentUseCode = local + break + } + } + } + + return nil +} + +// AddProductPatents adds the given related objects to the existing relationships +// of the patent_use_code, optionally inserting them as new records. +// Appends related to o.R.ProductPatents. +// Sets related.R.ProductPatentPatentUseCode appropriately. 
+func (o *PatentUseCode) AddProductPatents(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ProductPatent) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.PatentUseCode, o.PatentUseCode) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"product_patents\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"patent_use_code"}), + strmangle.WhereClause("\"", "\"", 0, productPatentPrimaryKeyColumns), + ) + values := []interface{}{o.PatentUseCode, rel.ProdPatID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.PatentUseCode, o.PatentUseCode) + } + } + + if o.R == nil { + o.R = &patentUseCodeR{ + ProductPatents: related, + } + } else { + o.R.ProductPatents = append(o.R.ProductPatents, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &productPatentR{ + ProductPatentPatentUseCode: o, + } + } else { + rel.R.ProductPatentPatentUseCode = o + } + } + return nil +} + +// SetProductPatents removes all previously related items of the +// patent_use_code replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.ProductPatentPatentUseCode's ProductPatents accordingly. +// Replaces o.R.ProductPatents with related. +// Sets related.R.ProductPatentPatentUseCode's ProductPatents accordingly. +func (o *PatentUseCode) SetProductPatents(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ProductPatent) error { + query := "update \"product_patents\" set \"patent_use_code\" = null where \"patent_use_code\" = ?" 
+ values := []interface{}{o.PatentUseCode} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.ProductPatents { + queries.SetScanner(&rel.PatentUseCode, nil) + if rel.R == nil { + continue + } + + rel.R.ProductPatentPatentUseCode = nil + } + o.R.ProductPatents = nil + } + + return o.AddProductPatents(ctx, exec, insert, related...) +} + +// RemoveProductPatents relationships from objects passed in. +// Removes related items from R.ProductPatents (uses pointer comparison, removal does not keep order) +// Sets related.R.ProductPatentPatentUseCode. +func (o *PatentUseCode) RemoveProductPatents(ctx context.Context, exec boil.ContextExecutor, related ...*ProductPatent) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.PatentUseCode, nil) + if rel.R != nil { + rel.R.ProductPatentPatentUseCode = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("patent_use_code")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.ProductPatents { + if rel != ri { + continue + } + + ln := len(o.R.ProductPatents) + if ln > 1 && i < ln-1 { + o.R.ProductPatents[i] = o.R.ProductPatents[ln-1] + } + o.R.ProductPatents = o.R.ProductPatents[:ln-1] + break + } + } + + return nil +} + +// PatentUseCodes retrieves all the records using an executor. +func PatentUseCodes(mods ...qm.QueryMod) patentUseCodeQuery { + mods = append(mods, qm.From("\"patent_use_codes\"")) + q := NewQuery(mods...) 
+ if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"patent_use_codes\".*"}) + } + + return patentUseCodeQuery{q} +} + +// FindPatentUseCode retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindPatentUseCode(ctx context.Context, exec boil.ContextExecutor, patentUseCode string, selectCols ...string) (*PatentUseCode, error) { + patentUseCodeObj := &PatentUseCode{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"patent_use_codes\" where \"patent_use_code\"=?", sel, + ) + + q := queries.Raw(query, patentUseCode) + + err := q.Bind(ctx, exec, patentUseCodeObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from patent_use_codes") + } + + if err = patentUseCodeObj.doAfterSelectHooks(ctx, exec); err != nil { + return patentUseCodeObj, err + } + + return patentUseCodeObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *PatentUseCode) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no patent_use_codes provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(patentUseCodeColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + patentUseCodeInsertCacheMut.RLock() + cache, cached := patentUseCodeInsertCache[key] + patentUseCodeInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + patentUseCodeAllColumns, + patentUseCodeColumnsWithDefault, + patentUseCodeColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(patentUseCodeType, patentUseCodeMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(patentUseCodeType, patentUseCodeMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"patent_use_codes\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"patent_use_codes\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) 
+ } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into patent_use_codes") + } + + if !cached { + patentUseCodeInsertCacheMut.Lock() + patentUseCodeInsertCache[key] = cache + patentUseCodeInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the PatentUseCode. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *PatentUseCode) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + patentUseCodeUpdateCacheMut.RLock() + cache, cached := patentUseCodeUpdateCache[key] + patentUseCodeUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + patentUseCodeAllColumns, + patentUseCodePrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update patent_use_codes, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"patent_use_codes\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, patentUseCodePrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(patentUseCodeType, patentUseCodeMapping, append(wl, patentUseCodePrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = 
exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update patent_use_codes row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for patent_use_codes") + } + + if !cached { + patentUseCodeUpdateCacheMut.Lock() + patentUseCodeUpdateCache[key] = cache + patentUseCodeUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q patentUseCodeQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for patent_use_codes") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for patent_use_codes") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o PatentUseCodeSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), patentUseCodePrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"patent_use_codes\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, patentUseCodePrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in patentUseCode slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all patentUseCode") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *PatentUseCode) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no patent_use_codes provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(patentUseCodeColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + 
strmangle.PutBuffer(buf) + + patentUseCodeUpsertCacheMut.RLock() + cache, cached := patentUseCodeUpsertCache[key] + patentUseCodeUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + patentUseCodeAllColumns, + patentUseCodeColumnsWithDefault, + patentUseCodeColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + patentUseCodeAllColumns, + patentUseCodePrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert patent_use_codes, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(patentUseCodePrimaryKeyColumns)) + copy(conflict, patentUseCodePrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"patent_use_codes\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(patentUseCodeType, patentUseCodeMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(patentUseCodeType, patentUseCodeMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert patent_use_codes") + } + + if !cached { + patentUseCodeUpsertCacheMut.Lock() + patentUseCodeUpsertCache[key] = cache + patentUseCodeUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single PatentUseCode record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *PatentUseCode) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no PatentUseCode provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), patentUseCodePrimaryKeyMapping) + sql := "DELETE FROM \"patent_use_codes\" WHERE \"patent_use_code\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from patent_use_codes") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for patent_use_codes") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q patentUseCodeQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no patentUseCodeQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from patent_use_codes") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for patent_use_codes") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o PatentUseCodeSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(patentUseCodeBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), patentUseCodePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"patent_use_codes\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, patentUseCodePrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from patentUseCode slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for patent_use_codes") + } + + if len(patentUseCodeAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *PatentUseCode) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindPatentUseCode(ctx, exec, o.PatentUseCode) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *PatentUseCodeSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := PatentUseCodeSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), patentUseCodePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"patent_use_codes\".* FROM \"patent_use_codes\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, patentUseCodePrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in PatentUseCodeSlice") + } + + *o = slice + + return nil +} + +// PatentUseCodeExists checks if the PatentUseCode row exists. +func PatentUseCodeExists(ctx context.Context, exec boil.ContextExecutor, patentUseCode string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"patent_use_codes\" where \"patent_use_code\"=? 
limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, patentUseCode) + } + row := exec.QueryRowContext(ctx, sql, patentUseCode) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if patent_use_codes exists") + } + + return exists, nil +} diff --git a/models/predicted_binding_domains.go b/models/predicted_binding_domains.go new file mode 100644 index 0000000..bf8ae73 --- /dev/null +++ b/models/predicted_binding_domains.go @@ -0,0 +1,1330 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// PredictedBindingDomain is an object representing the database table. 
+type PredictedBindingDomain struct { + PredbindID int64 `boil:"predbind_id" json:"predbind_id" toml:"predbind_id" yaml:"predbind_id"` + ActivityID null.Int64 `boil:"activity_id" json:"activity_id,omitempty" toml:"activity_id" yaml:"activity_id,omitempty"` + SiteID null.Int64 `boil:"site_id" json:"site_id,omitempty" toml:"site_id" yaml:"site_id,omitempty"` + PredictionMethod null.String `boil:"prediction_method" json:"prediction_method,omitempty" toml:"prediction_method" yaml:"prediction_method,omitempty"` + Confidence null.String `boil:"confidence" json:"confidence,omitempty" toml:"confidence" yaml:"confidence,omitempty"` + + R *predictedBindingDomainR `boil:"-" json:"-" toml:"-" yaml:"-"` + L predictedBindingDomainL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var PredictedBindingDomainColumns = struct { + PredbindID string + ActivityID string + SiteID string + PredictionMethod string + Confidence string +}{ + PredbindID: "predbind_id", + ActivityID: "activity_id", + SiteID: "site_id", + PredictionMethod: "prediction_method", + Confidence: "confidence", +} + +var PredictedBindingDomainTableColumns = struct { + PredbindID string + ActivityID string + SiteID string + PredictionMethod string + Confidence string +}{ + PredbindID: "predicted_binding_domains.predbind_id", + ActivityID: "predicted_binding_domains.activity_id", + SiteID: "predicted_binding_domains.site_id", + PredictionMethod: "predicted_binding_domains.prediction_method", + Confidence: "predicted_binding_domains.confidence", +} + +// Generated where + +var PredictedBindingDomainWhere = struct { + PredbindID whereHelperint64 + ActivityID whereHelpernull_Int64 + SiteID whereHelpernull_Int64 + PredictionMethod whereHelpernull_String + Confidence whereHelpernull_String +}{ + PredbindID: whereHelperint64{field: "\"predicted_binding_domains\".\"predbind_id\""}, + ActivityID: whereHelpernull_Int64{field: "\"predicted_binding_domains\".\"activity_id\""}, + SiteID: whereHelpernull_Int64{field: 
"\"predicted_binding_domains\".\"site_id\""}, + PredictionMethod: whereHelpernull_String{field: "\"predicted_binding_domains\".\"prediction_method\""}, + Confidence: whereHelpernull_String{field: "\"predicted_binding_domains\".\"confidence\""}, +} + +// PredictedBindingDomainRels is where relationship names are stored. +var PredictedBindingDomainRels = struct { + Site string + Activity string +}{ + Site: "Site", + Activity: "Activity", +} + +// predictedBindingDomainR is where relationships are stored. +type predictedBindingDomainR struct { + Site *BindingSite `boil:"Site" json:"Site" toml:"Site" yaml:"Site"` + Activity *Activity `boil:"Activity" json:"Activity" toml:"Activity" yaml:"Activity"` +} + +// NewStruct creates a new relationship struct +func (*predictedBindingDomainR) NewStruct() *predictedBindingDomainR { + return &predictedBindingDomainR{} +} + +func (r *predictedBindingDomainR) GetSite() *BindingSite { + if r == nil { + return nil + } + return r.Site +} + +func (r *predictedBindingDomainR) GetActivity() *Activity { + if r == nil { + return nil + } + return r.Activity +} + +// predictedBindingDomainL is where Load methods for each relationship are stored. +type predictedBindingDomainL struct{} + +var ( + predictedBindingDomainAllColumns = []string{"predbind_id", "activity_id", "site_id", "prediction_method", "confidence"} + predictedBindingDomainColumnsWithoutDefault = []string{"predbind_id"} + predictedBindingDomainColumnsWithDefault = []string{"activity_id", "site_id", "prediction_method", "confidence"} + predictedBindingDomainPrimaryKeyColumns = []string{"predbind_id"} + predictedBindingDomainGeneratedColumns = []string{} +) + +type ( + // PredictedBindingDomainSlice is an alias for a slice of pointers to PredictedBindingDomain. + // This should almost always be used instead of []PredictedBindingDomain. 
+ PredictedBindingDomainSlice []*PredictedBindingDomain + // PredictedBindingDomainHook is the signature for custom PredictedBindingDomain hook methods + PredictedBindingDomainHook func(context.Context, boil.ContextExecutor, *PredictedBindingDomain) error + + predictedBindingDomainQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + predictedBindingDomainType = reflect.TypeOf(&PredictedBindingDomain{}) + predictedBindingDomainMapping = queries.MakeStructMapping(predictedBindingDomainType) + predictedBindingDomainPrimaryKeyMapping, _ = queries.BindMapping(predictedBindingDomainType, predictedBindingDomainMapping, predictedBindingDomainPrimaryKeyColumns) + predictedBindingDomainInsertCacheMut sync.RWMutex + predictedBindingDomainInsertCache = make(map[string]insertCache) + predictedBindingDomainUpdateCacheMut sync.RWMutex + predictedBindingDomainUpdateCache = make(map[string]updateCache) + predictedBindingDomainUpsertCacheMut sync.RWMutex + predictedBindingDomainUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var predictedBindingDomainAfterSelectHooks []PredictedBindingDomainHook + +var predictedBindingDomainBeforeInsertHooks []PredictedBindingDomainHook +var predictedBindingDomainAfterInsertHooks []PredictedBindingDomainHook + +var predictedBindingDomainBeforeUpdateHooks []PredictedBindingDomainHook +var predictedBindingDomainAfterUpdateHooks []PredictedBindingDomainHook + +var predictedBindingDomainBeforeDeleteHooks []PredictedBindingDomainHook +var predictedBindingDomainAfterDeleteHooks []PredictedBindingDomainHook + +var predictedBindingDomainBeforeUpsertHooks []PredictedBindingDomainHook +var predictedBindingDomainAfterUpsertHooks []PredictedBindingDomainHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *PredictedBindingDomain) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range predictedBindingDomainAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *PredictedBindingDomain) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range predictedBindingDomainBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *PredictedBindingDomain) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range predictedBindingDomainAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *PredictedBindingDomain) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range predictedBindingDomainBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *PredictedBindingDomain) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range predictedBindingDomainAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *PredictedBindingDomain) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range predictedBindingDomainBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *PredictedBindingDomain) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range predictedBindingDomainAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *PredictedBindingDomain) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range predictedBindingDomainBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *PredictedBindingDomain) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range predictedBindingDomainAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddPredictedBindingDomainHook registers your hook function for all future operations. +func AddPredictedBindingDomainHook(hookPoint boil.HookPoint, predictedBindingDomainHook PredictedBindingDomainHook) { + switch hookPoint { + case boil.AfterSelectHook: + predictedBindingDomainAfterSelectHooks = append(predictedBindingDomainAfterSelectHooks, predictedBindingDomainHook) + case boil.BeforeInsertHook: + predictedBindingDomainBeforeInsertHooks = append(predictedBindingDomainBeforeInsertHooks, predictedBindingDomainHook) + case boil.AfterInsertHook: + predictedBindingDomainAfterInsertHooks = append(predictedBindingDomainAfterInsertHooks, predictedBindingDomainHook) + case boil.BeforeUpdateHook: + predictedBindingDomainBeforeUpdateHooks = append(predictedBindingDomainBeforeUpdateHooks, predictedBindingDomainHook) + case boil.AfterUpdateHook: + predictedBindingDomainAfterUpdateHooks = append(predictedBindingDomainAfterUpdateHooks, predictedBindingDomainHook) + case boil.BeforeDeleteHook: + predictedBindingDomainBeforeDeleteHooks = append(predictedBindingDomainBeforeDeleteHooks, predictedBindingDomainHook) + case boil.AfterDeleteHook: + predictedBindingDomainAfterDeleteHooks = append(predictedBindingDomainAfterDeleteHooks, predictedBindingDomainHook) + case boil.BeforeUpsertHook: + predictedBindingDomainBeforeUpsertHooks = append(predictedBindingDomainBeforeUpsertHooks, predictedBindingDomainHook) + case boil.AfterUpsertHook: + predictedBindingDomainAfterUpsertHooks = append(predictedBindingDomainAfterUpsertHooks, predictedBindingDomainHook) + } +} + +// One returns a single predictedBindingDomain record from the query. 
+func (q predictedBindingDomainQuery) One(ctx context.Context, exec boil.ContextExecutor) (*PredictedBindingDomain, error) { + o := &PredictedBindingDomain{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for predicted_binding_domains") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all PredictedBindingDomain records from the query. +func (q predictedBindingDomainQuery) All(ctx context.Context, exec boil.ContextExecutor) (PredictedBindingDomainSlice, error) { + var o []*PredictedBindingDomain + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to PredictedBindingDomain slice") + } + + if len(predictedBindingDomainAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all PredictedBindingDomain records in the query. +func (q predictedBindingDomainQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count predicted_binding_domains rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. 
+func (q predictedBindingDomainQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if predicted_binding_domains exists") + } + + return count > 0, nil +} + +// Site pointed to by the foreign key. +func (o *PredictedBindingDomain) Site(mods ...qm.QueryMod) bindingSiteQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"site_id\" = ?", o.SiteID), + } + + queryMods = append(queryMods, mods...) + + return BindingSites(queryMods...) +} + +// Activity pointed to by the foreign key. +func (o *PredictedBindingDomain) Activity(mods ...qm.QueryMod) activityQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"activity_id\" = ?", o.ActivityID), + } + + queryMods = append(queryMods, mods...) + + return Activities(queryMods...) +} + +// LoadSite allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (predictedBindingDomainL) LoadSite(ctx context.Context, e boil.ContextExecutor, singular bool, maybePredictedBindingDomain interface{}, mods queries.Applicator) error { + var slice []*PredictedBindingDomain + var object *PredictedBindingDomain + + if singular { + object = maybePredictedBindingDomain.(*PredictedBindingDomain) + } else { + slice = *maybePredictedBindingDomain.(*[]*PredictedBindingDomain) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &predictedBindingDomainR{} + } + if !queries.IsNil(object.SiteID) { + args = append(args, object.SiteID) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &predictedBindingDomainR{} + } + + for _, a := range args { + if queries.Equal(a, obj.SiteID) { + continue Outer + } + } + + if !queries.IsNil(obj.SiteID) { + args = append(args, obj.SiteID) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`binding_sites`), + qm.WhereIn(`binding_sites.site_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load BindingSite") + } + + var resultSlice []*BindingSite + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice BindingSite") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for binding_sites") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for binding_sites") + } + + if len(predictedBindingDomainAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Site = foreign + if foreign.R == nil { 
+ foreign.R = &bindingSiteR{} + } + foreign.R.SitePredictedBindingDomains = append(foreign.R.SitePredictedBindingDomains, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.SiteID, foreign.SiteID) { + local.R.Site = foreign + if foreign.R == nil { + foreign.R = &bindingSiteR{} + } + foreign.R.SitePredictedBindingDomains = append(foreign.R.SitePredictedBindingDomains, local) + break + } + } + } + + return nil +} + +// LoadActivity allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (predictedBindingDomainL) LoadActivity(ctx context.Context, e boil.ContextExecutor, singular bool, maybePredictedBindingDomain interface{}, mods queries.Applicator) error { + var slice []*PredictedBindingDomain + var object *PredictedBindingDomain + + if singular { + object = maybePredictedBindingDomain.(*PredictedBindingDomain) + } else { + slice = *maybePredictedBindingDomain.(*[]*PredictedBindingDomain) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &predictedBindingDomainR{} + } + if !queries.IsNil(object.ActivityID) { + args = append(args, object.ActivityID) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &predictedBindingDomainR{} + } + + for _, a := range args { + if queries.Equal(a, obj.ActivityID) { + continue Outer + } + } + + if !queries.IsNil(obj.ActivityID) { + args = append(args, obj.ActivityID) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`activities`), + qm.WhereIn(`activities.activity_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Activity") + } + + var resultSlice []*Activity + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to 
bind eager loaded slice Activity") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for activities") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activities") + } + + if len(predictedBindingDomainAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Activity = foreign + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.PredictedBindingDomains = append(foreign.R.PredictedBindingDomains, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.ActivityID, foreign.ActivityID) { + local.R.Activity = foreign + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.PredictedBindingDomains = append(foreign.R.PredictedBindingDomains, local) + break + } + } + } + + return nil +} + +// SetSite of the predictedBindingDomain to the related item. +// Sets o.R.Site to related. +// Adds o to related.R.SitePredictedBindingDomains. 
+func (o *PredictedBindingDomain) SetSite(ctx context.Context, exec boil.ContextExecutor, insert bool, related *BindingSite) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"predicted_binding_domains\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"site_id"}), + strmangle.WhereClause("\"", "\"", 0, predictedBindingDomainPrimaryKeyColumns), + ) + values := []interface{}{related.SiteID, o.PredbindID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.SiteID, related.SiteID) + if o.R == nil { + o.R = &predictedBindingDomainR{ + Site: related, + } + } else { + o.R.Site = related + } + + if related.R == nil { + related.R = &bindingSiteR{ + SitePredictedBindingDomains: PredictedBindingDomainSlice{o}, + } + } else { + related.R.SitePredictedBindingDomains = append(related.R.SitePredictedBindingDomains, o) + } + + return nil +} + +// RemoveSite relationship. +// Sets o.R.Site to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *PredictedBindingDomain) RemoveSite(ctx context.Context, exec boil.ContextExecutor, related *BindingSite) error { + var err error + + queries.SetScanner(&o.SiteID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("site_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.Site = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.SitePredictedBindingDomains { + if queries.Equal(o.SiteID, ri.SiteID) { + continue + } + + ln := len(related.R.SitePredictedBindingDomains) + if ln > 1 && i < ln-1 { + related.R.SitePredictedBindingDomains[i] = related.R.SitePredictedBindingDomains[ln-1] + } + related.R.SitePredictedBindingDomains = related.R.SitePredictedBindingDomains[:ln-1] + break + } + return nil +} + +// SetActivity of the predictedBindingDomain to the related item. +// Sets o.R.Activity to related. +// Adds o to related.R.PredictedBindingDomains. +func (o *PredictedBindingDomain) SetActivity(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Activity) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"predicted_binding_domains\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"activity_id"}), + strmangle.WhereClause("\"", "\"", 0, predictedBindingDomainPrimaryKeyColumns), + ) + values := []interface{}{related.ActivityID, o.PredbindID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.ActivityID, related.ActivityID) + if o.R == nil { + o.R = &predictedBindingDomainR{ + Activity: related, + } + } else { + 
o.R.Activity = related + } + + if related.R == nil { + related.R = &activityR{ + PredictedBindingDomains: PredictedBindingDomainSlice{o}, + } + } else { + related.R.PredictedBindingDomains = append(related.R.PredictedBindingDomains, o) + } + + return nil +} + +// RemoveActivity relationship. +// Sets o.R.Activity to nil. +// Removes o from all passed in related items' relationships struct. +func (o *PredictedBindingDomain) RemoveActivity(ctx context.Context, exec boil.ContextExecutor, related *Activity) error { + var err error + + queries.SetScanner(&o.ActivityID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("activity_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.Activity = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.PredictedBindingDomains { + if queries.Equal(o.ActivityID, ri.ActivityID) { + continue + } + + ln := len(related.R.PredictedBindingDomains) + if ln > 1 && i < ln-1 { + related.R.PredictedBindingDomains[i] = related.R.PredictedBindingDomains[ln-1] + } + related.R.PredictedBindingDomains = related.R.PredictedBindingDomains[:ln-1] + break + } + return nil +} + +// PredictedBindingDomains retrieves all the records using an executor. +func PredictedBindingDomains(mods ...qm.QueryMod) predictedBindingDomainQuery { + mods = append(mods, qm.From("\"predicted_binding_domains\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"predicted_binding_domains\".*"}) + } + + return predictedBindingDomainQuery{q} +} + +// FindPredictedBindingDomain retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindPredictedBindingDomain(ctx context.Context, exec boil.ContextExecutor, predbindID int64, selectCols ...string) (*PredictedBindingDomain, error) { + predictedBindingDomainObj := &PredictedBindingDomain{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"predicted_binding_domains\" where \"predbind_id\"=?", sel, + ) + + q := queries.Raw(query, predbindID) + + err := q.Bind(ctx, exec, predictedBindingDomainObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from predicted_binding_domains") + } + + if err = predictedBindingDomainObj.doAfterSelectHooks(ctx, exec); err != nil { + return predictedBindingDomainObj, err + } + + return predictedBindingDomainObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *PredictedBindingDomain) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no predicted_binding_domains provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(predictedBindingDomainColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + predictedBindingDomainInsertCacheMut.RLock() + cache, cached := predictedBindingDomainInsertCache[key] + predictedBindingDomainInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + predictedBindingDomainAllColumns, + predictedBindingDomainColumnsWithDefault, + predictedBindingDomainColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(predictedBindingDomainType, predictedBindingDomainMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(predictedBindingDomainType, predictedBindingDomainMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"predicted_binding_domains\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"predicted_binding_domains\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, 
cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into predicted_binding_domains") + } + + if !cached { + predictedBindingDomainInsertCacheMut.Lock() + predictedBindingDomainInsertCache[key] = cache + predictedBindingDomainInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the PredictedBindingDomain. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *PredictedBindingDomain) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + predictedBindingDomainUpdateCacheMut.RLock() + cache, cached := predictedBindingDomainUpdateCache[key] + predictedBindingDomainUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + predictedBindingDomainAllColumns, + predictedBindingDomainPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update predicted_binding_domains, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"predicted_binding_domains\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, predictedBindingDomainPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(predictedBindingDomainType, predictedBindingDomainMapping, append(wl, predictedBindingDomainPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := 
queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update predicted_binding_domains row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for predicted_binding_domains") + } + + if !cached { + predictedBindingDomainUpdateCacheMut.Lock() + predictedBindingDomainUpdateCache[key] = cache + predictedBindingDomainUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q predictedBindingDomainQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for predicted_binding_domains") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for predicted_binding_domains") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. 
+func (o PredictedBindingDomainSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), predictedBindingDomainPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"predicted_binding_domains\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, predictedBindingDomainPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in predictedBindingDomain slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all predictedBindingDomain") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. 
func (o *PredictedBindingDomain) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no predicted_binding_domains provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	// Columns with DB defaults that currently hold non-zero values must be
	// inserted explicitly rather than left to the database default.
	nzDefaults := queries.NonZeroDefaultSet(predictedBindingDomainColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	// Reuse a previously built upsert query/mapping for this column
	// combination if one exists.
	predictedBindingDomainUpsertCacheMut.RLock()
	cache, cached := predictedBindingDomainUpsertCache[key]
	predictedBindingDomainUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			predictedBindingDomainAllColumns,
			predictedBindingDomainColumnsWithDefault,
			predictedBindingDomainColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			predictedBindingDomainAllColumns,
			predictedBindingDomainPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert predicted_binding_domains, could not build update column list")
		}

		// Default the conflict target to the primary key when none is given.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(predictedBindingDomainPrimaryKeyColumns))
			copy(conflict, predictedBindingDomainPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"predicted_binding_domains\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(predictedBindingDomainType, predictedBindingDomainMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(predictedBindingDomainType, predictedBindingDomainMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	// When RETURNING columns exist, scan them back into the struct;
	// otherwise a plain Exec suffices.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert predicted_binding_domains")
	}

	if !cached {
		predictedBindingDomainUpsertCacheMut.Lock()
		predictedBindingDomainUpsertCache[key] = cache
		predictedBindingDomainUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single PredictedBindingDomain record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *PredictedBindingDomain) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no PredictedBindingDomain provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	// Match the row to delete on the primary key only.
	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), predictedBindingDomainPrimaryKeyMapping)
	sql := "DELETE FROM \"predicted_binding_domains\" WHERE \"predbind_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from predicted_binding_domains")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for predicted_binding_domains")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows. It returns the number of rows removed.
func (q predictedBindingDomainQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no predictedBindingDomainQuery provided for delete all")
	}

	// Convert the built SELECT query into a DELETE before execution.
	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from predicted_binding_domains")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for predicted_binding_domains")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o PredictedBindingDomainSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil // empty slice: nothing to delete
	}

	// Before-delete hooks run for every element prior to issuing the DELETE.
	if len(predictedBindingDomainBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// Collect every element's primary key values as placeholder arguments.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), predictedBindingDomainPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"predicted_binding_domains\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, predictedBindingDomainPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from predictedBindingDomain slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for predicted_binding_domains")
	}

	if len(predictedBindingDomainAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *PredictedBindingDomain) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindPredictedBindingDomain(ctx, exec, o.PredbindID)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place with the freshly fetched row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *PredictedBindingDomainSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil // nothing to reload
	}

	slice := PredictedBindingDomainSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), predictedBindingDomainPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// Single SELECT matching all elements' primary keys at once.
	sql := "SELECT \"predicted_binding_domains\".* FROM \"predicted_binding_domains\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, predictedBindingDomainPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in PredictedBindingDomainSlice")
	}

	// NOTE: the reloaded slice replaces the original wholesale; database
	// ordering may differ from the original slice order.
	*o = slice

	return nil
}

// PredictedBindingDomainExists checks if the PredictedBindingDomain row exists.
func PredictedBindingDomainExists(ctx context.Context, exec boil.ContextExecutor, predbindID int64) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"predicted_binding_domains\" where \"predbind_id\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, predbindID)
	}
	row := exec.QueryRowContext(ctx, sql, predbindID)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if predicted_binding_domains exists")
	}

	return exists, nil
}
diff --git a/models/product_patents.go b/models/product_patents.go
new file mode 100644
index 0000000..8d597f2
--- /dev/null
+++ b/models/product_patents.go
@@ -0,0 +1,1366 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// ProductPatent is an object representing the database table.
// Nullable columns use null.* wrapper types; R/L carry eager-loaded
// relationships and their Load methods respectively.
type ProductPatent struct {
	ProdPatID         int64       `boil:"prod_pat_id" json:"prod_pat_id" toml:"prod_pat_id" yaml:"prod_pat_id"`
	ProductID         string      `boil:"product_id" json:"product_id" toml:"product_id" yaml:"product_id"`
	PatentNo          string      `boil:"patent_no" json:"patent_no" toml:"patent_no" yaml:"patent_no"`
	PatentExpireDate  time.Time   `boil:"patent_expire_date" json:"patent_expire_date" toml:"patent_expire_date" yaml:"patent_expire_date"`
	DrugSubstanceFlag int16       `boil:"drug_substance_flag" json:"drug_substance_flag" toml:"drug_substance_flag" yaml:"drug_substance_flag"`
	DrugProductFlag   int16       `boil:"drug_product_flag" json:"drug_product_flag" toml:"drug_product_flag" yaml:"drug_product_flag"`
	PatentUseCode     null.String `boil:"patent_use_code" json:"patent_use_code,omitempty" toml:"patent_use_code" yaml:"patent_use_code,omitempty"`
	DelistFlag        int16       `boil:"delist_flag" json:"delist_flag" toml:"delist_flag" yaml:"delist_flag"`
	SubmissionDate    null.Time   `boil:"submission_date" json:"submission_date,omitempty" toml:"submission_date" yaml:"submission_date,omitempty"`

	R *productPatentR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L productPatentL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// ProductPatentColumns holds the bare column names of the table.
var ProductPatentColumns = struct {
	ProdPatID         string
	ProductID         string
	PatentNo          string
	PatentExpireDate  string
	DrugSubstanceFlag string
	DrugProductFlag   string
	PatentUseCode     string
	DelistFlag        string
	SubmissionDate    string
}{
	ProdPatID:         "prod_pat_id",
	ProductID:         "product_id",
	PatentNo:          "patent_no",
	PatentExpireDate:  "patent_expire_date",
	DrugSubstanceFlag: "drug_substance_flag",
	DrugProductFlag:   "drug_product_flag",
	PatentUseCode:     "patent_use_code",
	DelistFlag:        "delist_flag",
	SubmissionDate:    "submission_date",
}

// ProductPatentTableColumns holds the table-qualified column names,
// useful when joining against other tables.
var ProductPatentTableColumns = struct {
	ProdPatID         string
	ProductID         string
	PatentNo          string
	PatentExpireDate  string
	DrugSubstanceFlag string
	DrugProductFlag   string
	PatentUseCode     string
	DelistFlag        string
	SubmissionDate    string
}{
	ProdPatID:         "product_patents.prod_pat_id",
	ProductID:         "product_patents.product_id",
	PatentNo:          "product_patents.patent_no",
	PatentExpireDate:  "product_patents.patent_expire_date",
	DrugSubstanceFlag: "product_patents.drug_substance_flag",
	DrugProductFlag:   "product_patents.drug_product_flag",
	PatentUseCode:     "product_patents.patent_use_code",
	DelistFlag:        "product_patents.delist_flag",
	SubmissionDate:    "product_patents.submission_date",
}

// Generated where

// whereHelpertime_Time builds comparison query mods for time.Time columns.
type whereHelpertime_Time struct{ field string }

func (w whereHelpertime_Time) EQ(x time.Time) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.EQ, x)
}
func (w whereHelpertime_Time) NEQ(x time.Time) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.NEQ, x)
}
func (w whereHelpertime_Time) LT(x time.Time) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.LT, x)
}
func (w whereHelpertime_Time) LTE(x time.Time) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.LTE, x)
}
func (w whereHelpertime_Time) GT(x time.Time) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.GT, x)
}
func (w whereHelpertime_Time) GTE(x time.Time) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.GTE, x)
}

// whereHelpernull_Time builds comparison query mods for nullable
// time columns, including IS NULL / IS NOT NULL checks.
type whereHelpernull_Time struct{ field string }

func (w whereHelpernull_Time) EQ(x null.Time) qm.QueryMod {
	return qmhelper.WhereNullEQ(w.field, false, x)
}
func (w whereHelpernull_Time) NEQ(x null.Time) qm.QueryMod {
	return qmhelper.WhereNullEQ(w.field, true, x)
}
func (w whereHelpernull_Time) LT(x null.Time) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.LT, x)
}
func (w whereHelpernull_Time) LTE(x null.Time) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.LTE, x)
}
func (w whereHelpernull_Time) GT(x null.Time) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.GT, x)
}
func (w whereHelpernull_Time) GTE(x null.Time) qm.QueryMod {
	return qmhelper.Where(w.field, qmhelper.GTE, x)
}

func (w whereHelpernull_Time) IsNull() qm.QueryMod    { return qmhelper.WhereIsNull(w.field) }
func (w whereHelpernull_Time) IsNotNull() qm.QueryMod { return qmhelper.WhereIsNotNull(w.field) }

// ProductPatentWhere provides typed where-clause helpers for each column.
var ProductPatentWhere = struct {
	ProdPatID         whereHelperint64
	ProductID         whereHelperstring
	PatentNo          whereHelperstring
	PatentExpireDate  whereHelpertime_Time
	DrugSubstanceFlag whereHelperint16
	DrugProductFlag   whereHelperint16
	PatentUseCode     whereHelpernull_String
	DelistFlag        whereHelperint16
	SubmissionDate    whereHelpernull_Time
}{
	ProdPatID:         whereHelperint64{field: "\"product_patents\".\"prod_pat_id\""},
	ProductID:         whereHelperstring{field: "\"product_patents\".\"product_id\""},
	PatentNo:          whereHelperstring{field: "\"product_patents\".\"patent_no\""},
	PatentExpireDate:  whereHelpertime_Time{field: "\"product_patents\".\"patent_expire_date\""},
	DrugSubstanceFlag: whereHelperint16{field: "\"product_patents\".\"drug_substance_flag\""},
	DrugProductFlag:   whereHelperint16{field: "\"product_patents\".\"drug_product_flag\""},
	PatentUseCode:     whereHelpernull_String{field: "\"product_patents\".\"patent_use_code\""},
	DelistFlag:        whereHelperint16{field: "\"product_patents\".\"delist_flag\""},
	SubmissionDate:    whereHelpernull_Time{field: "\"product_patents\".\"submission_date\""},
}

// ProductPatentRels is where relationship names are stored.
var ProductPatentRels = struct {
	ProductPatentPatentUseCode string
	Product                    string
}{
	ProductPatentPatentUseCode: "ProductPatentPatentUseCode",
	Product:                    "Product",
}

// productPatentR is where relationships are stored.
type productPatentR struct {
	ProductPatentPatentUseCode *PatentUseCode `boil:"ProductPatentPatentUseCode" json:"ProductPatentPatentUseCode" toml:"ProductPatentPatentUseCode" yaml:"ProductPatentPatentUseCode"`
	Product                    *Product       `boil:"Product" json:"Product" toml:"Product" yaml:"Product"`
}

// NewStruct creates a new relationship struct
func (*productPatentR) NewStruct() *productPatentR {
	return &productPatentR{}
}

// GetProductPatentPatentUseCode is a nil-safe accessor for the eager-loaded
// PatentUseCode relationship.
func (r *productPatentR) GetProductPatentPatentUseCode() *PatentUseCode {
	if r == nil {
		return nil
	}
	return r.ProductPatentPatentUseCode
}

// GetProduct is a nil-safe accessor for the eager-loaded Product relationship.
func (r *productPatentR) GetProduct() *Product {
	if r == nil {
		return nil
	}
	return r.Product
}

// productPatentL is where Load methods for each relationship are stored.
type productPatentL struct{}

// Column-name metadata used by the generated CRUD helpers below.
var (
	productPatentAllColumns            = []string{"prod_pat_id", "product_id", "patent_no", "patent_expire_date", "drug_substance_flag", "drug_product_flag", "patent_use_code", "delist_flag", "submission_date"}
	productPatentColumnsWithoutDefault = []string{"prod_pat_id", "product_id", "patent_no", "patent_expire_date", "drug_substance_flag", "drug_product_flag", "delist_flag"}
	productPatentColumnsWithDefault    = []string{"patent_use_code", "submission_date"}
	productPatentPrimaryKeyColumns     = []string{"prod_pat_id"}
	productPatentGeneratedColumns      = []string{}
)

type (
	// ProductPatentSlice is an alias for a slice of pointers to ProductPatent.
	// This should almost always be used instead of []ProductPatent.
	ProductPatentSlice []*ProductPatent
	// ProductPatentHook is the signature for custom ProductPatent hook methods
	ProductPatentHook func(context.Context, boil.ContextExecutor, *ProductPatent) error

	productPatentQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	productPatentType                 = reflect.TypeOf(&ProductPatent{})
	productPatentMapping              = queries.MakeStructMapping(productPatentType)
	productPatentPrimaryKeyMapping, _ = queries.BindMapping(productPatentType, productPatentMapping, productPatentPrimaryKeyColumns)
	productPatentInsertCacheMut       sync.RWMutex
	productPatentInsertCache          = make(map[string]insertCache)
	productPatentUpdateCacheMut       sync.RWMutex
	productPatentUpdateCache          = make(map[string]updateCache)
	productPatentUpsertCacheMut       sync.RWMutex
	productPatentUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Registered hook functions, keyed by hook point; executed by the
// do*Hooks methods further down.
var productPatentAfterSelectHooks []ProductPatentHook

var productPatentBeforeInsertHooks []ProductPatentHook
var productPatentAfterInsertHooks []ProductPatentHook

var productPatentBeforeUpdateHooks []ProductPatentHook
var productPatentAfterUpdateHooks []ProductPatentHook

var productPatentBeforeDeleteHooks []ProductPatentHook
var productPatentAfterDeleteHooks []ProductPatentHook

var productPatentBeforeUpsertHooks []ProductPatentHook
var productPatentAfterUpsertHooks []ProductPatentHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *ProductPatent) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be globally disabled via boil.SkipHooks on the context.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range productPatentAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *ProductPatent) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range productPatentBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *ProductPatent) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range productPatentAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *ProductPatent) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range productPatentBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *ProductPatent) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range productPatentAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *ProductPatent) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range productPatentBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *ProductPatent) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range productPatentAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *ProductPatent) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range productPatentBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *ProductPatent) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range productPatentAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddProductPatentHook registers your hook function for all future operations.
func AddProductPatentHook(hookPoint boil.HookPoint, productPatentHook ProductPatentHook) {
	// Append the hook to the slice matching its hook point; unknown hook
	// points are silently ignored.
	switch hookPoint {
	case boil.AfterSelectHook:
		productPatentAfterSelectHooks = append(productPatentAfterSelectHooks, productPatentHook)
	case boil.BeforeInsertHook:
		productPatentBeforeInsertHooks = append(productPatentBeforeInsertHooks, productPatentHook)
	case boil.AfterInsertHook:
		productPatentAfterInsertHooks = append(productPatentAfterInsertHooks, productPatentHook)
	case boil.BeforeUpdateHook:
		productPatentBeforeUpdateHooks = append(productPatentBeforeUpdateHooks, productPatentHook)
	case boil.AfterUpdateHook:
		productPatentAfterUpdateHooks = append(productPatentAfterUpdateHooks, productPatentHook)
	case boil.BeforeDeleteHook:
		productPatentBeforeDeleteHooks = append(productPatentBeforeDeleteHooks, productPatentHook)
	case boil.AfterDeleteHook:
		productPatentAfterDeleteHooks = append(productPatentAfterDeleteHooks, productPatentHook)
	case boil.BeforeUpsertHook:
		productPatentBeforeUpsertHooks = append(productPatentBeforeUpsertHooks, productPatentHook)
	case boil.AfterUpsertHook:
		productPatentAfterUpsertHooks = append(productPatentAfterUpsertHooks, productPatentHook)
	}
}

// One returns a single productPatent record from the query.
// It returns sql.ErrNoRows when the query matches nothing.
func (q productPatentQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ProductPatent, error) {
	o := &ProductPatent{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for product_patents")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all ProductPatent records from the query.
func (q productPatentQuery) All(ctx context.Context, exec boil.ContextExecutor) (ProductPatentSlice, error) {
	var o []*ProductPatent

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to ProductPatent slice")
	}

	// Run after-select hooks on every fetched record, if any are registered.
	if len(productPatentAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all ProductPatent records in the query.
func (q productPatentQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Strip any SELECT list and turn the query into a COUNT(*).
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count product_patents rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q productPatentQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if product_patents exists")
	}

	return count > 0, nil
}

// ProductPatentPatentUseCode pointed to by the foreign key.
func (o *ProductPatent) ProductPatentPatentUseCode(mods ...qm.QueryMod) patentUseCodeQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"patent_use_code\" = ?", o.PatentUseCode),
	}

	queryMods = append(queryMods, mods...)

	return PatentUseCodes(queryMods...)
}

// Product pointed to by the foreign key.
func (o *ProductPatent) Product(mods ...qm.QueryMod) productQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"product_id\" = ?", o.ProductID),
	}

	queryMods = append(queryMods, mods...)

	return Products(queryMods...)
}

// LoadProductPatentPatentUseCode allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (productPatentL) LoadProductPatentPatentUseCode(ctx context.Context, e boil.ContextExecutor, singular bool, maybeProductPatent interface{}, mods queries.Applicator) error {
	var slice []*ProductPatent
	var object *ProductPatent

	// maybeProductPatent is either a single struct (singular) or a slice.
	if singular {
		object = maybeProductPatent.(*ProductPatent)
	} else {
		slice = *maybeProductPatent.(*[]*ProductPatent)
	}

	// Collect the distinct, non-NULL foreign key values to look up.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &productPatentR{}
		}
		if !queries.IsNil(object.PatentUseCode) {
			args = append(args, object.PatentUseCode)
		}

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &productPatentR{}
			}

			// Skip values already queued for lookup (dedupe).
			for _, a := range args {
				if queries.Equal(a, obj.PatentUseCode) {
					continue Outer
				}
			}

			if !queries.IsNil(obj.PatentUseCode) {
				args = append(args, obj.PatentUseCode)
			}

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`patent_use_codes`),
		qm.WhereIn(`patent_use_codes.patent_use_code in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load PatentUseCode")
	}

	var resultSlice []*PatentUseCode
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice PatentUseCode")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for patent_use_codes")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for patent_use_codes")
	}

	if len(productPatentAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	// Wire up both sides of the relationship on the loaded structs.
	if singular {
		foreign := resultSlice[0]
		object.R.ProductPatentPatentUseCode = foreign
		if foreign.R == nil {
			foreign.R = &patentUseCodeR{}
		}
		foreign.R.ProductPatents = append(foreign.R.ProductPatents, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if queries.Equal(local.PatentUseCode, foreign.PatentUseCode) {
				local.R.ProductPatentPatentUseCode = foreign
				if foreign.R == nil {
					foreign.R = &patentUseCodeR{}
				}
				foreign.R.ProductPatents = append(foreign.R.ProductPatents, local)
				break
			}
		}
	}

	return nil
}

// LoadProduct allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (productPatentL) LoadProduct(ctx context.Context, e boil.ContextExecutor, singular bool, maybeProductPatent interface{}, mods queries.Applicator) error {
	var slice []*ProductPatent
	var object *ProductPatent

	if singular {
		object = maybeProductPatent.(*ProductPatent)
	} else {
		slice = *maybeProductPatent.(*[]*ProductPatent)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &productPatentR{}
		}
		args = append(args, object.ProductID)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &productPatentR{}
			}

			// ProductID is non-nullable, so a plain equality dedupe suffices.
			for _, a := range args {
				if a == obj.ProductID {
					continue Outer
				}
			}

			args = append(args, obj.ProductID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`products`),
		qm.WhereIn(`products.product_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load Product")
	}

	var resultSlice []*Product
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice Product")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for products")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for products")
	}

	if len(productPatentAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.Product = foreign
		if foreign.R == nil {
			foreign.R = &productR{}
		}
		foreign.R.ProductPatents = append(foreign.R.ProductPatents, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.ProductID == foreign.ProductID {
				local.R.Product = foreign
				if foreign.R == nil {
					foreign.R = &productR{}
				}
				foreign.R.ProductPatents = append(foreign.R.ProductPatents, local)
				break
			}
		}
	}

	return nil
}

// SetProductPatentPatentUseCode of the productPatent to the related item.
// Sets o.R.ProductPatentPatentUseCode to related.
// Adds o to related.R.ProductPatents.
func (o *ProductPatent) SetProductPatentPatentUseCode(ctx context.Context, exec boil.ContextExecutor, insert bool, related *PatentUseCode) error {
	var err error
	// Optionally insert the related row first so its key exists.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Point this row's foreign key at the related record.
	updateQuery := fmt.Sprintf(
		"UPDATE \"product_patents\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"patent_use_code"}),
		strmangle.WhereClause("\"", "\"", 0, productPatentPrimaryKeyColumns),
	)
	values := []interface{}{related.PatentUseCode, o.ProdPatID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Keep the in-memory struct and both relationship caches in sync.
	queries.Assign(&o.PatentUseCode, related.PatentUseCode)
	if o.R == nil {
		o.R = &productPatentR{
			ProductPatentPatentUseCode: related,
		}
	} else {
		o.R.ProductPatentPatentUseCode = related
	}

	if related.R == nil {
		related.R = &patentUseCodeR{
			ProductPatents: ProductPatentSlice{o},
		}
	} else {
		related.R.ProductPatents = append(related.R.ProductPatents, o)
	}

	return nil
}

// RemoveProductPatentPatentUseCode relationship.
// Sets o.R.ProductPatentPatentUseCode to nil.
// Removes o from all passed in related items' relationships struct.
+func (o *ProductPatent) RemoveProductPatentPatentUseCode(ctx context.Context, exec boil.ContextExecutor, related *PatentUseCode) error { + var err error + + queries.SetScanner(&o.PatentUseCode, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("patent_use_code")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.ProductPatentPatentUseCode = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.ProductPatents { + if queries.Equal(o.PatentUseCode, ri.PatentUseCode) { + continue + } + + ln := len(related.R.ProductPatents) + if ln > 1 && i < ln-1 { + related.R.ProductPatents[i] = related.R.ProductPatents[ln-1] + } + related.R.ProductPatents = related.R.ProductPatents[:ln-1] + break + } + return nil +} + +// SetProduct of the productPatent to the related item. +// Sets o.R.Product to related. +// Adds o to related.R.ProductPatents. +func (o *ProductPatent) SetProduct(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Product) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"product_patents\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"product_id"}), + strmangle.WhereClause("\"", "\"", 0, productPatentPrimaryKeyColumns), + ) + values := []interface{}{related.ProductID, o.ProdPatID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.ProductID = related.ProductID + if o.R == nil { + o.R = &productPatentR{ + Product: related, + } + } else { + o.R.Product = related + } + + if related.R == nil { + related.R = &productR{ + ProductPatents: 
ProductPatentSlice{o}, + } + } else { + related.R.ProductPatents = append(related.R.ProductPatents, o) + } + + return nil +} + +// ProductPatents retrieves all the records using an executor. +func ProductPatents(mods ...qm.QueryMod) productPatentQuery { + mods = append(mods, qm.From("\"product_patents\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"product_patents\".*"}) + } + + return productPatentQuery{q} +} + +// FindProductPatent retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindProductPatent(ctx context.Context, exec boil.ContextExecutor, prodPatID int64, selectCols ...string) (*ProductPatent, error) { + productPatentObj := &ProductPatent{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"product_patents\" where \"prod_pat_id\"=?", sel, + ) + + q := queries.Raw(query, prodPatID) + + err := q.Bind(ctx, exec, productPatentObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from product_patents") + } + + if err = productPatentObj.doAfterSelectHooks(ctx, exec); err != nil { + return productPatentObj, err + } + + return productPatentObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *ProductPatent) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no product_patents provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(productPatentColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + productPatentInsertCacheMut.RLock() + cache, cached := productPatentInsertCache[key] + productPatentInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + productPatentAllColumns, + productPatentColumnsWithDefault, + productPatentColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(productPatentType, productPatentMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(productPatentType, productPatentMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"product_patents\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"product_patents\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) 
+ } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into product_patents") + } + + if !cached { + productPatentInsertCacheMut.Lock() + productPatentInsertCache[key] = cache + productPatentInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ProductPatent. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *ProductPatent) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + productPatentUpdateCacheMut.RLock() + cache, cached := productPatentUpdateCache[key] + productPatentUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + productPatentAllColumns, + productPatentPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update product_patents, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"product_patents\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, productPatentPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(productPatentType, productPatentMapping, append(wl, productPatentPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = 
exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update product_patents row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for product_patents") + } + + if !cached { + productPatentUpdateCacheMut.Lock() + productPatentUpdateCache[key] = cache + productPatentUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q productPatentQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for product_patents") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for product_patents") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ProductPatentSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), productPatentPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"product_patents\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, productPatentPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in productPatent slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all productPatent") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *ProductPatent) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no product_patents provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(productPatentColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + 
strmangle.PutBuffer(buf) + + productPatentUpsertCacheMut.RLock() + cache, cached := productPatentUpsertCache[key] + productPatentUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + productPatentAllColumns, + productPatentColumnsWithDefault, + productPatentColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + productPatentAllColumns, + productPatentPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert product_patents, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(productPatentPrimaryKeyColumns)) + copy(conflict, productPatentPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"product_patents\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(productPatentType, productPatentMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(productPatentType, productPatentMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert product_patents") + } + + if !cached { + productPatentUpsertCacheMut.Lock() + productPatentUpsertCache[key] = cache + productPatentUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ProductPatent record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *ProductPatent) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ProductPatent provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), productPatentPrimaryKeyMapping) + sql := "DELETE FROM \"product_patents\" WHERE \"prod_pat_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from product_patents") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for product_patents") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q productPatentQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no productPatentQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from product_patents") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for product_patents") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o ProductPatentSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(productPatentBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), productPatentPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"product_patents\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, productPatentPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from productPatent slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for product_patents") + } + + if len(productPatentAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ProductPatent) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindProductPatent(ctx, exec, o.ProdPatID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *ProductPatentSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ProductPatentSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), productPatentPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"product_patents\".* FROM \"product_patents\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, productPatentPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ProductPatentSlice") + } + + *o = slice + + return nil +} + +// ProductPatentExists checks if the ProductPatent row exists. +func ProductPatentExists(ctx context.Context, exec boil.ContextExecutor, prodPatID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"product_patents\" where \"prod_pat_id\"=? 
limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, prodPatID) + } + row := exec.QueryRowContext(ctx, sql, prodPatID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if product_patents exists") + } + + return exists, nil +} diff --git a/models/products.go b/models/products.go new file mode 100644 index 0000000..b84b6cf --- /dev/null +++ b/models/products.go @@ -0,0 +1,1318 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Product is an object representing the database table. 
+type Product struct { + DosageForm null.String `boil:"dosage_form" json:"dosage_form,omitempty" toml:"dosage_form" yaml:"dosage_form,omitempty"` + Route null.String `boil:"route" json:"route,omitempty" toml:"route" yaml:"route,omitempty"` + TradeName null.String `boil:"trade_name" json:"trade_name,omitempty" toml:"trade_name" yaml:"trade_name,omitempty"` + ApprovalDate null.Time `boil:"approval_date" json:"approval_date,omitempty" toml:"approval_date" yaml:"approval_date,omitempty"` + AdType null.String `boil:"ad_type" json:"ad_type,omitempty" toml:"ad_type" yaml:"ad_type,omitempty"` + Oral null.Int16 `boil:"oral" json:"oral,omitempty" toml:"oral" yaml:"oral,omitempty"` + Topical null.Int16 `boil:"topical" json:"topical,omitempty" toml:"topical" yaml:"topical,omitempty"` + Parenteral null.Int16 `boil:"parenteral" json:"parenteral,omitempty" toml:"parenteral" yaml:"parenteral,omitempty"` + BlackBoxWarning null.Int16 `boil:"black_box_warning" json:"black_box_warning,omitempty" toml:"black_box_warning" yaml:"black_box_warning,omitempty"` + ApplicantFullName null.String `boil:"applicant_full_name" json:"applicant_full_name,omitempty" toml:"applicant_full_name" yaml:"applicant_full_name,omitempty"` + InnovatorCompany null.Int16 `boil:"innovator_company" json:"innovator_company,omitempty" toml:"innovator_company" yaml:"innovator_company,omitempty"` + ProductID string `boil:"product_id" json:"product_id" toml:"product_id" yaml:"product_id"` + NdaType null.String `boil:"nda_type" json:"nda_type,omitempty" toml:"nda_type" yaml:"nda_type,omitempty"` + + R *productR `boil:"-" json:"-" toml:"-" yaml:"-"` + L productL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ProductColumns = struct { + DosageForm string + Route string + TradeName string + ApprovalDate string + AdType string + Oral string + Topical string + Parenteral string + BlackBoxWarning string + ApplicantFullName string + InnovatorCompany string + ProductID string + NdaType string +}{ + DosageForm: "dosage_form", + 
Route: "route", + TradeName: "trade_name", + ApprovalDate: "approval_date", + AdType: "ad_type", + Oral: "oral", + Topical: "topical", + Parenteral: "parenteral", + BlackBoxWarning: "black_box_warning", + ApplicantFullName: "applicant_full_name", + InnovatorCompany: "innovator_company", + ProductID: "product_id", + NdaType: "nda_type", +} + +var ProductTableColumns = struct { + DosageForm string + Route string + TradeName string + ApprovalDate string + AdType string + Oral string + Topical string + Parenteral string + BlackBoxWarning string + ApplicantFullName string + InnovatorCompany string + ProductID string + NdaType string +}{ + DosageForm: "products.dosage_form", + Route: "products.route", + TradeName: "products.trade_name", + ApprovalDate: "products.approval_date", + AdType: "products.ad_type", + Oral: "products.oral", + Topical: "products.topical", + Parenteral: "products.parenteral", + BlackBoxWarning: "products.black_box_warning", + ApplicantFullName: "products.applicant_full_name", + InnovatorCompany: "products.innovator_company", + ProductID: "products.product_id", + NdaType: "products.nda_type", +} + +// Generated where + +var ProductWhere = struct { + DosageForm whereHelpernull_String + Route whereHelpernull_String + TradeName whereHelpernull_String + ApprovalDate whereHelpernull_Time + AdType whereHelpernull_String + Oral whereHelpernull_Int16 + Topical whereHelpernull_Int16 + Parenteral whereHelpernull_Int16 + BlackBoxWarning whereHelpernull_Int16 + ApplicantFullName whereHelpernull_String + InnovatorCompany whereHelpernull_Int16 + ProductID whereHelperstring + NdaType whereHelpernull_String +}{ + DosageForm: whereHelpernull_String{field: "\"products\".\"dosage_form\""}, + Route: whereHelpernull_String{field: "\"products\".\"route\""}, + TradeName: whereHelpernull_String{field: "\"products\".\"trade_name\""}, + ApprovalDate: whereHelpernull_Time{field: "\"products\".\"approval_date\""}, + AdType: whereHelpernull_String{field: 
"\"products\".\"ad_type\""}, + Oral: whereHelpernull_Int16{field: "\"products\".\"oral\""}, + Topical: whereHelpernull_Int16{field: "\"products\".\"topical\""}, + Parenteral: whereHelpernull_Int16{field: "\"products\".\"parenteral\""}, + BlackBoxWarning: whereHelpernull_Int16{field: "\"products\".\"black_box_warning\""}, + ApplicantFullName: whereHelpernull_String{field: "\"products\".\"applicant_full_name\""}, + InnovatorCompany: whereHelpernull_Int16{field: "\"products\".\"innovator_company\""}, + ProductID: whereHelperstring{field: "\"products\".\"product_id\""}, + NdaType: whereHelpernull_String{field: "\"products\".\"nda_type\""}, +} + +// ProductRels is where relationship names are stored. +var ProductRels = struct { + Formulations string + ProductPatents string +}{ + Formulations: "Formulations", + ProductPatents: "ProductPatents", +} + +// productR is where relationships are stored. +type productR struct { + Formulations FormulationSlice `boil:"Formulations" json:"Formulations" toml:"Formulations" yaml:"Formulations"` + ProductPatents ProductPatentSlice `boil:"ProductPatents" json:"ProductPatents" toml:"ProductPatents" yaml:"ProductPatents"` +} + +// NewStruct creates a new relationship struct +func (*productR) NewStruct() *productR { + return &productR{} +} + +func (r *productR) GetFormulations() FormulationSlice { + if r == nil { + return nil + } + return r.Formulations +} + +func (r *productR) GetProductPatents() ProductPatentSlice { + if r == nil { + return nil + } + return r.ProductPatents +} + +// productL is where Load methods for each relationship are stored. 
+type productL struct{} + +var ( + productAllColumns = []string{"dosage_form", "route", "trade_name", "approval_date", "ad_type", "oral", "topical", "parenteral", "black_box_warning", "applicant_full_name", "innovator_company", "product_id", "nda_type"} + productColumnsWithoutDefault = []string{"product_id"} + productColumnsWithDefault = []string{"dosage_form", "route", "trade_name", "approval_date", "ad_type", "oral", "topical", "parenteral", "black_box_warning", "applicant_full_name", "innovator_company", "nda_type"} + productPrimaryKeyColumns = []string{"product_id"} + productGeneratedColumns = []string{} +) + +type ( + // ProductSlice is an alias for a slice of pointers to Product. + // This should almost always be used instead of []Product. + ProductSlice []*Product + // ProductHook is the signature for custom Product hook methods + ProductHook func(context.Context, boil.ContextExecutor, *Product) error + + productQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + productType = reflect.TypeOf(&Product{}) + productMapping = queries.MakeStructMapping(productType) + productPrimaryKeyMapping, _ = queries.BindMapping(productType, productMapping, productPrimaryKeyColumns) + productInsertCacheMut sync.RWMutex + productInsertCache = make(map[string]insertCache) + productUpdateCacheMut sync.RWMutex + productUpdateCache = make(map[string]updateCache) + productUpsertCacheMut sync.RWMutex + productUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var productAfterSelectHooks []ProductHook + +var productBeforeInsertHooks []ProductHook +var productAfterInsertHooks []ProductHook + +var productBeforeUpdateHooks []ProductHook +var productAfterUpdateHooks []ProductHook + +var productBeforeDeleteHooks []ProductHook +var productAfterDeleteHooks []ProductHook + +var productBeforeUpsertHooks []ProductHook +var productAfterUpsertHooks []ProductHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *Product) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range productAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *Product) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range productBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Product) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range productAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *Product) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range productBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. 
+func (o *Product) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range productAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Product) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range productBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *Product) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range productAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Product) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range productBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *Product) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range productAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddProductHook registers your hook function for all future operations. 
+func AddProductHook(hookPoint boil.HookPoint, productHook ProductHook) { + switch hookPoint { + case boil.AfterSelectHook: + productAfterSelectHooks = append(productAfterSelectHooks, productHook) + case boil.BeforeInsertHook: + productBeforeInsertHooks = append(productBeforeInsertHooks, productHook) + case boil.AfterInsertHook: + productAfterInsertHooks = append(productAfterInsertHooks, productHook) + case boil.BeforeUpdateHook: + productBeforeUpdateHooks = append(productBeforeUpdateHooks, productHook) + case boil.AfterUpdateHook: + productAfterUpdateHooks = append(productAfterUpdateHooks, productHook) + case boil.BeforeDeleteHook: + productBeforeDeleteHooks = append(productBeforeDeleteHooks, productHook) + case boil.AfterDeleteHook: + productAfterDeleteHooks = append(productAfterDeleteHooks, productHook) + case boil.BeforeUpsertHook: + productBeforeUpsertHooks = append(productBeforeUpsertHooks, productHook) + case boil.AfterUpsertHook: + productAfterUpsertHooks = append(productAfterUpsertHooks, productHook) + } +} + +// One returns a single product record from the query. +func (q productQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Product, error) { + o := &Product{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for products") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Product records from the query. 
+func (q productQuery) All(ctx context.Context, exec boil.ContextExecutor) (ProductSlice, error) { + var o []*Product + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to Product slice") + } + + if len(productAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Product records in the query. +func (q productQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count products rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q productQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if products exists") + } + + return count > 0, nil +} + +// Formulations retrieves all the formulation's Formulations with an executor. +func (o *Product) Formulations(mods ...qm.QueryMod) formulationQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"formulations\".\"product_id\"=?", o.ProductID), + ) + + return Formulations(queryMods...) +} + +// ProductPatents retrieves all the product_patent's ProductPatents with an executor. +func (o *Product) ProductPatents(mods ...qm.QueryMod) productPatentQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) 
+ } + + queryMods = append(queryMods, + qm.Where("\"product_patents\".\"product_id\"=?", o.ProductID), + ) + + return ProductPatents(queryMods...) +} + +// LoadFormulations allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (productL) LoadFormulations(ctx context.Context, e boil.ContextExecutor, singular bool, maybeProduct interface{}, mods queries.Applicator) error { + var slice []*Product + var object *Product + + if singular { + object = maybeProduct.(*Product) + } else { + slice = *maybeProduct.(*[]*Product) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &productR{} + } + args = append(args, object.ProductID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &productR{} + } + + for _, a := range args { + if a == obj.ProductID { + continue Outer + } + } + + args = append(args, obj.ProductID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`formulations`), + qm.WhereIn(`formulations.product_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load formulations") + } + + var resultSlice []*Formulation + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice formulations") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on formulations") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for formulations") + } + + if len(formulationAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.Formulations = resultSlice + for _, foreign := range 
resultSlice { + if foreign.R == nil { + foreign.R = &formulationR{} + } + foreign.R.Product = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.ProductID == foreign.ProductID { + local.R.Formulations = append(local.R.Formulations, foreign) + if foreign.R == nil { + foreign.R = &formulationR{} + } + foreign.R.Product = local + break + } + } + } + + return nil +} + +// LoadProductPatents allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (productL) LoadProductPatents(ctx context.Context, e boil.ContextExecutor, singular bool, maybeProduct interface{}, mods queries.Applicator) error { + var slice []*Product + var object *Product + + if singular { + object = maybeProduct.(*Product) + } else { + slice = *maybeProduct.(*[]*Product) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &productR{} + } + args = append(args, object.ProductID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &productR{} + } + + for _, a := range args { + if a == obj.ProductID { + continue Outer + } + } + + args = append(args, obj.ProductID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`product_patents`), + qm.WhereIn(`product_patents.product_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load product_patents") + } + + var resultSlice []*ProductPatent + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice product_patents") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on product_patents") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of 
eager loaded relations for product_patents") + } + + if len(productPatentAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.ProductPatents = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &productPatentR{} + } + foreign.R.Product = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.ProductID == foreign.ProductID { + local.R.ProductPatents = append(local.R.ProductPatents, foreign) + if foreign.R == nil { + foreign.R = &productPatentR{} + } + foreign.R.Product = local + break + } + } + } + + return nil +} + +// AddFormulations adds the given related objects to the existing relationships +// of the product, optionally inserting them as new records. +// Appends related to o.R.Formulations. +// Sets related.R.Product appropriately. +func (o *Product) AddFormulations(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Formulation) error { + var err error + for _, rel := range related { + if insert { + rel.ProductID = o.ProductID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"formulations\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"product_id"}), + strmangle.WhereClause("\"", "\"", 0, formulationPrimaryKeyColumns), + ) + values := []interface{}{o.ProductID, rel.FormulationID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.ProductID = o.ProductID + } + } + + if o.R == nil { + o.R = &productR{ + Formulations: related, + } + } else { + 
o.R.Formulations = append(o.R.Formulations, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &formulationR{ + Product: o, + } + } else { + rel.R.Product = o + } + } + return nil +} + +// AddProductPatents adds the given related objects to the existing relationships +// of the product, optionally inserting them as new records. +// Appends related to o.R.ProductPatents. +// Sets related.R.Product appropriately. +func (o *Product) AddProductPatents(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ProductPatent) error { + var err error + for _, rel := range related { + if insert { + rel.ProductID = o.ProductID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"product_patents\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"product_id"}), + strmangle.WhereClause("\"", "\"", 0, productPatentPrimaryKeyColumns), + ) + values := []interface{}{o.ProductID, rel.ProdPatID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.ProductID = o.ProductID + } + } + + if o.R == nil { + o.R = &productR{ + ProductPatents: related, + } + } else { + o.R.ProductPatents = append(o.R.ProductPatents, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &productPatentR{ + Product: o, + } + } else { + rel.R.Product = o + } + } + return nil +} + +// Products retrieves all the records using an executor. +func Products(mods ...qm.QueryMod) productQuery { + mods = append(mods, qm.From("\"products\"")) + q := NewQuery(mods...) 
+ if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"products\".*"}) + } + + return productQuery{q} +} + +// FindProduct retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindProduct(ctx context.Context, exec boil.ContextExecutor, productID string, selectCols ...string) (*Product, error) { + productObj := &Product{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"products\" where \"product_id\"=?", sel, + ) + + q := queries.Raw(query, productID) + + err := q.Bind(ctx, exec, productObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from products") + } + + if err = productObj.doAfterSelectHooks(ctx, exec); err != nil { + return productObj, err + } + + return productObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *Product) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no products provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(productColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + productInsertCacheMut.RLock() + cache, cached := productInsertCache[key] + productInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + productAllColumns, + productColumnsWithDefault, + productColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(productType, productMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(productType, productMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"products\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"products\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into products") + } + + if !cached { + productInsertCacheMut.Lock() + productInsertCache[key] = cache + productInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the Product. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *Product) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + productUpdateCacheMut.RLock() + cache, cached := productUpdateCache[key] + productUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + productAllColumns, + productPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update products, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"products\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, productPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(productType, productMapping, append(wl, productPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update products row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for products") + } + + if !cached { + productUpdateCacheMut.Lock() + productUpdateCache[key] = cache + productUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q productQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for products") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for products") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ProductSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), productPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"products\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, productPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in product slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all product") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Product) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no products provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(productColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + productUpsertCacheMut.RLock() + cache, 
cached := productUpsertCache[key] + productUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + productAllColumns, + productColumnsWithDefault, + productColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + productAllColumns, + productPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert products, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(productPrimaryKeyColumns)) + copy(conflict, productPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"products\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(productType, productMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(productType, productMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert products") + } + + if !cached { + productUpsertCacheMut.Lock() + productUpsertCache[key] = cache + productUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Product record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *Product) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no Product provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), productPrimaryKeyMapping) + sql := "DELETE FROM \"products\" WHERE \"product_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from products") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for products") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q productQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no productQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from products") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for products") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o ProductSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(productBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), productPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"products\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, productPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from product slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for products") + } + + if len(productAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. 
+func (o *Product) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindProduct(ctx, exec, o.ProductID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *ProductSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ProductSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), productPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"products\".* FROM \"products\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, productPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ProductSlice") + } + + *o = slice + + return nil +} + +// ProductExists checks if the Product row exists. +func ProductExists(ctx context.Context, exec boil.ContextExecutor, productID string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"products\" where \"product_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, productID) + } + row := exec.QueryRowContext(ctx, sql, productID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if products exists") + } + + return exists, nil +} diff --git a/models/protein_class_synonyms.go b/models/protein_class_synonyms.go new file mode 100644 index 0000000..076c050 --- /dev/null +++ b/models/protein_class_synonyms.go @@ -0,0 +1,1077 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. 
+// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// ProteinClassSynonym is an object representing the database table. +type ProteinClassSynonym struct { + ProtclasssynID int64 `boil:"protclasssyn_id" json:"protclasssyn_id" toml:"protclasssyn_id" yaml:"protclasssyn_id"` + ProteinClassID int64 `boil:"protein_class_id" json:"protein_class_id" toml:"protein_class_id" yaml:"protein_class_id"` + ProteinClassSynonym null.String `boil:"protein_class_synonym" json:"protein_class_synonym,omitempty" toml:"protein_class_synonym" yaml:"protein_class_synonym,omitempty"` + SynType null.String `boil:"syn_type" json:"syn_type,omitempty" toml:"syn_type" yaml:"syn_type,omitempty"` + + R *proteinClassSynonymR `boil:"-" json:"-" toml:"-" yaml:"-"` + L proteinClassSynonymL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ProteinClassSynonymColumns = struct { + ProtclasssynID string + ProteinClassID string + ProteinClassSynonym string + SynType string +}{ + ProtclasssynID: "protclasssyn_id", + ProteinClassID: "protein_class_id", + ProteinClassSynonym: "protein_class_synonym", + SynType: "syn_type", +} + +var ProteinClassSynonymTableColumns = struct { + ProtclasssynID string + ProteinClassID string + ProteinClassSynonym string + SynType string +}{ + ProtclasssynID: "protein_class_synonyms.protclasssyn_id", + ProteinClassID: "protein_class_synonyms.protein_class_id", + ProteinClassSynonym: "protein_class_synonyms.protein_class_synonym", + SynType: "protein_class_synonyms.syn_type", +} + +// Generated where + +var 
ProteinClassSynonymWhere = struct { + ProtclasssynID whereHelperint64 + ProteinClassID whereHelperint64 + ProteinClassSynonym whereHelpernull_String + SynType whereHelpernull_String +}{ + ProtclasssynID: whereHelperint64{field: "\"protein_class_synonyms\".\"protclasssyn_id\""}, + ProteinClassID: whereHelperint64{field: "\"protein_class_synonyms\".\"protein_class_id\""}, + ProteinClassSynonym: whereHelpernull_String{field: "\"protein_class_synonyms\".\"protein_class_synonym\""}, + SynType: whereHelpernull_String{field: "\"protein_class_synonyms\".\"syn_type\""}, +} + +// ProteinClassSynonymRels is where relationship names are stored. +var ProteinClassSynonymRels = struct { + ProteinClass string +}{ + ProteinClass: "ProteinClass", +} + +// proteinClassSynonymR is where relationships are stored. +type proteinClassSynonymR struct { + ProteinClass *ProteinClassification `boil:"ProteinClass" json:"ProteinClass" toml:"ProteinClass" yaml:"ProteinClass"` +} + +// NewStruct creates a new relationship struct +func (*proteinClassSynonymR) NewStruct() *proteinClassSynonymR { + return &proteinClassSynonymR{} +} + +func (r *proteinClassSynonymR) GetProteinClass() *ProteinClassification { + if r == nil { + return nil + } + return r.ProteinClass +} + +// proteinClassSynonymL is where Load methods for each relationship are stored. +type proteinClassSynonymL struct{} + +var ( + proteinClassSynonymAllColumns = []string{"protclasssyn_id", "protein_class_id", "protein_class_synonym", "syn_type"} + proteinClassSynonymColumnsWithoutDefault = []string{"protclasssyn_id", "protein_class_id"} + proteinClassSynonymColumnsWithDefault = []string{"protein_class_synonym", "syn_type"} + proteinClassSynonymPrimaryKeyColumns = []string{"protclasssyn_id"} + proteinClassSynonymGeneratedColumns = []string{} +) + +type ( + // ProteinClassSynonymSlice is an alias for a slice of pointers to ProteinClassSynonym. + // This should almost always be used instead of []ProteinClassSynonym. 
+ ProteinClassSynonymSlice []*ProteinClassSynonym + // ProteinClassSynonymHook is the signature for custom ProteinClassSynonym hook methods + ProteinClassSynonymHook func(context.Context, boil.ContextExecutor, *ProteinClassSynonym) error + + proteinClassSynonymQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + proteinClassSynonymType = reflect.TypeOf(&ProteinClassSynonym{}) + proteinClassSynonymMapping = queries.MakeStructMapping(proteinClassSynonymType) + proteinClassSynonymPrimaryKeyMapping, _ = queries.BindMapping(proteinClassSynonymType, proteinClassSynonymMapping, proteinClassSynonymPrimaryKeyColumns) + proteinClassSynonymInsertCacheMut sync.RWMutex + proteinClassSynonymInsertCache = make(map[string]insertCache) + proteinClassSynonymUpdateCacheMut sync.RWMutex + proteinClassSynonymUpdateCache = make(map[string]updateCache) + proteinClassSynonymUpsertCacheMut sync.RWMutex + proteinClassSynonymUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var proteinClassSynonymAfterSelectHooks []ProteinClassSynonymHook + +var proteinClassSynonymBeforeInsertHooks []ProteinClassSynonymHook +var proteinClassSynonymAfterInsertHooks []ProteinClassSynonymHook + +var proteinClassSynonymBeforeUpdateHooks []ProteinClassSynonymHook +var proteinClassSynonymAfterUpdateHooks []ProteinClassSynonymHook + +var proteinClassSynonymBeforeDeleteHooks []ProteinClassSynonymHook +var proteinClassSynonymAfterDeleteHooks []ProteinClassSynonymHook + +var proteinClassSynonymBeforeUpsertHooks []ProteinClassSynonymHook +var proteinClassSynonymAfterUpsertHooks []ProteinClassSynonymHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *ProteinClassSynonym) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassSynonymAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *ProteinClassSynonym) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassSynonymBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *ProteinClassSynonym) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassSynonymAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *ProteinClassSynonym) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassSynonymBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ProteinClassSynonym) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassSynonymAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *ProteinClassSynonym) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassSynonymBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *ProteinClassSynonym) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassSynonymAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ProteinClassSynonym) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassSynonymBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *ProteinClassSynonym) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassSynonymAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddProteinClassSynonymHook registers your hook function for all future operations. 
+func AddProteinClassSynonymHook(hookPoint boil.HookPoint, proteinClassSynonymHook ProteinClassSynonymHook) { + switch hookPoint { + case boil.AfterSelectHook: + proteinClassSynonymAfterSelectHooks = append(proteinClassSynonymAfterSelectHooks, proteinClassSynonymHook) + case boil.BeforeInsertHook: + proteinClassSynonymBeforeInsertHooks = append(proteinClassSynonymBeforeInsertHooks, proteinClassSynonymHook) + case boil.AfterInsertHook: + proteinClassSynonymAfterInsertHooks = append(proteinClassSynonymAfterInsertHooks, proteinClassSynonymHook) + case boil.BeforeUpdateHook: + proteinClassSynonymBeforeUpdateHooks = append(proteinClassSynonymBeforeUpdateHooks, proteinClassSynonymHook) + case boil.AfterUpdateHook: + proteinClassSynonymAfterUpdateHooks = append(proteinClassSynonymAfterUpdateHooks, proteinClassSynonymHook) + case boil.BeforeDeleteHook: + proteinClassSynonymBeforeDeleteHooks = append(proteinClassSynonymBeforeDeleteHooks, proteinClassSynonymHook) + case boil.AfterDeleteHook: + proteinClassSynonymAfterDeleteHooks = append(proteinClassSynonymAfterDeleteHooks, proteinClassSynonymHook) + case boil.BeforeUpsertHook: + proteinClassSynonymBeforeUpsertHooks = append(proteinClassSynonymBeforeUpsertHooks, proteinClassSynonymHook) + case boil.AfterUpsertHook: + proteinClassSynonymAfterUpsertHooks = append(proteinClassSynonymAfterUpsertHooks, proteinClassSynonymHook) + } +} + +// One returns a single proteinClassSynonym record from the query. 
+func (q proteinClassSynonymQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ProteinClassSynonym, error) { + o := &ProteinClassSynonym{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for protein_class_synonyms") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ProteinClassSynonym records from the query. +func (q proteinClassSynonymQuery) All(ctx context.Context, exec boil.ContextExecutor) (ProteinClassSynonymSlice, error) { + var o []*ProteinClassSynonym + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ProteinClassSynonym slice") + } + + if len(proteinClassSynonymAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ProteinClassSynonym records in the query. +func (q proteinClassSynonymQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count protein_class_synonyms rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. 
// Exists checks if the row exists in the table.
// LIMIT 1 lets the database stop at the first match; the COUNT over at most
// one row is then compared against zero.
func (q proteinClassSynonymQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if protein_class_synonyms exists")
	}

	return count > 0, nil
}

// ProteinClass pointed to by the foreign key.
// Returns a query for the parent protein_classification row; extra mods are
// appended after the FK filter.
func (o *ProteinClassSynonym) ProteinClass(mods ...qm.QueryMod) proteinClassificationQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"protein_class_id\" = ?", o.ProteinClassID),
	}

	queryMods = append(queryMods, mods...)

	return ProteinClassifications(queryMods...)
}

// LoadProteinClass allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
// maybeProteinClassSynonym is either a *ProteinClassSynonym (singular) or a
// *[]*ProteinClassSynonym; foreign keys are de-duplicated before the single
// IN query, and results are attached to both sides of the relationship.
func (proteinClassSynonymL) LoadProteinClass(ctx context.Context, e boil.ContextExecutor, singular bool, maybeProteinClassSynonym interface{}, mods queries.Applicator) error {
	var slice []*ProteinClassSynonym
	var object *ProteinClassSynonym

	if singular {
		object = maybeProteinClassSynonym.(*ProteinClassSynonym)
	} else {
		slice = *maybeProteinClassSynonym.(*[]*ProteinClassSynonym)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &proteinClassSynonymR{}
		}
		args = append(args, object.ProteinClassID)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &proteinClassSynonymR{}
			}

			// Skip foreign keys already queued so each ID is fetched once.
			for _, a := range args {
				if a == obj.ProteinClassID {
					continue Outer
				}
			}

			args = append(args, obj.ProteinClassID)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`protein_classification`),
		qm.WhereIn(`protein_classification.protein_class_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load ProteinClassification")
	}

	var resultSlice []*ProteinClassification
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice ProteinClassification")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for protein_classification")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for protein_classification")
	}

	if len(proteinClassificationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.ProteinClass = foreign
		if foreign.R == nil {
			foreign.R = &proteinClassificationR{}
		}
		foreign.R.ProteinClassProteinClassSynonyms = append(foreign.R.ProteinClassProteinClassSynonyms, object)
		return nil
	}

	// Match each local row to its foreign parent and link both directions.
	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.ProteinClassID == foreign.ProteinClassID {
				local.R.ProteinClass = foreign
				if foreign.R == nil {
					foreign.R = &proteinClassificationR{}
				}
				foreign.R.ProteinClassProteinClassSynonyms = append(foreign.R.ProteinClassProteinClassSynonyms, local)
				break
			}
		}
	}

	return nil
}

// SetProteinClass of the proteinClassSynonym to the related item.
// Sets o.R.ProteinClass to related.
// Adds o to related.R.ProteinClassProteinClassSynonyms.
+func (o *ProteinClassSynonym) SetProteinClass(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ProteinClassification) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"protein_class_synonyms\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"protein_class_id"}), + strmangle.WhereClause("\"", "\"", 0, proteinClassSynonymPrimaryKeyColumns), + ) + values := []interface{}{related.ProteinClassID, o.ProtclasssynID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.ProteinClassID = related.ProteinClassID + if o.R == nil { + o.R = &proteinClassSynonymR{ + ProteinClass: related, + } + } else { + o.R.ProteinClass = related + } + + if related.R == nil { + related.R = &proteinClassificationR{ + ProteinClassProteinClassSynonyms: ProteinClassSynonymSlice{o}, + } + } else { + related.R.ProteinClassProteinClassSynonyms = append(related.R.ProteinClassProteinClassSynonyms, o) + } + + return nil +} + +// ProteinClassSynonyms retrieves all the records using an executor. +func ProteinClassSynonyms(mods ...qm.QueryMod) proteinClassSynonymQuery { + mods = append(mods, qm.From("\"protein_class_synonyms\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"protein_class_synonyms\".*"}) + } + + return proteinClassSynonymQuery{q} +} + +// FindProteinClassSynonym retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindProteinClassSynonym(ctx context.Context, exec boil.ContextExecutor, protclasssynID int64, selectCols ...string) (*ProteinClassSynonym, error) { + proteinClassSynonymObj := &ProteinClassSynonym{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"protein_class_synonyms\" where \"protclasssyn_id\"=?", sel, + ) + + q := queries.Raw(query, protclasssynID) + + err := q.Bind(ctx, exec, proteinClassSynonymObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from protein_class_synonyms") + } + + if err = proteinClassSynonymObj.doAfterSelectHooks(ctx, exec); err != nil { + return proteinClassSynonymObj, err + } + + return proteinClassSynonymObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
// The built statement and its struct-field mapping are cached per column-set
// key, so repeated inserts with the same column inference skip query building.
func (o *ProteinClassSynonym) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no protein_class_synonyms provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(proteinClassSynonymColumnsWithDefault, o)

	key := makeCacheKey(columns, nzDefaults)
	proteinClassSynonymInsertCacheMut.RLock()
	cache, cached := proteinClassSynonymInsertCache[key]
	proteinClassSynonymInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			proteinClassSynonymAllColumns,
			proteinClassSynonymColumnsWithDefault,
			proteinClassSynonymColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(proteinClassSynonymType, proteinClassSynonymMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(proteinClassSynonymType, proteinClassSynonymMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"protein_class_synonyms\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"protein_class_synonyms\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			// RETURNING feeds database-generated defaults back into the struct.
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into protein_class_synonyms")
	}

	if !cached {
		proteinClassSynonymInsertCacheMut.Lock()
		proteinClassSynonymInsertCache[key] = cache
		proteinClassSynonymInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the ProteinClassSynonym.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
// Returns the number of rows affected.
func (o *ProteinClassSynonym) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	proteinClassSynonymUpdateCacheMut.RLock()
	cache, cached := proteinClassSynonymUpdateCache[key]
	proteinClassSynonymUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			proteinClassSynonymAllColumns,
			proteinClassSynonymPrimaryKeyColumns,
		)

		if !columns.IsWhitelist() {
			// created_at is never updated unless explicitly whitelisted.
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update protein_class_synonyms, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"protein_class_synonyms\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, proteinClassSynonymPrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(proteinClassSynonymType, proteinClassSynonymMapping, append(wl, proteinClassSynonymPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update protein_class_synonyms row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for protein_class_synonyms")
	}

	if !cached {
		proteinClassSynonymUpdateCacheMut.Lock()
		proteinClassSynonymUpdateCache[key] = cache
		proteinClassSynonymUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q proteinClassSynonymQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for protein_class_synonyms")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for protein_class_synonyms")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
+func (o ProteinClassSynonymSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), proteinClassSynonymPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"protein_class_synonyms\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, proteinClassSynonymPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in proteinClassSynonym slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all proteinClassSynonym") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. 
// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
// conflictColumns defaults to the primary key when empty; the generated
// statement uses SQLite's ON CONFLICT syntax and is cached per column-set key.
func (o *ProteinClassSynonym) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no protein_class_synonyms provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(proteinClassSynonymColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	proteinClassSynonymUpsertCacheMut.RLock()
	cache, cached := proteinClassSynonymUpsertCache[key]
	proteinClassSynonymUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			proteinClassSynonymAllColumns,
			proteinClassSynonymColumnsWithDefault,
			proteinClassSynonymColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			proteinClassSynonymAllColumns,
			proteinClassSynonymPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert protein_class_synonyms, could not build update column list")
		}

		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(proteinClassSynonymPrimaryKeyColumns))
			copy(conflict, proteinClassSynonymPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"protein_class_synonyms\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(proteinClassSynonymType, proteinClassSynonymMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(proteinClassSynonymType, proteinClassSynonymMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert protein_class_synonyms")
	}

	if !cached {
		proteinClassSynonymUpsertCacheMut.Lock()
		proteinClassSynonymUpsertCache[key] = cache
		proteinClassSynonymUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single ProteinClassSynonym record with an executor.
// Delete will match against the primary key column to find the record to delete.
// Returns the number of rows affected; before/after delete hooks are run.
func (o *ProteinClassSynonym) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no ProteinClassSynonym provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), proteinClassSynonymPrimaryKeyMapping)
	sql := "DELETE FROM \"protein_class_synonyms\" WHERE \"protclasssyn_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from protein_class_synonyms")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for protein_class_synonyms")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q proteinClassSynonymQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no proteinClassSynonymQuery provided for delete all")
	}

	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from protein_class_synonyms")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for protein_class_synonyms")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
// A single DELETE matches every primary key; delete hooks run per record.
func (o ProteinClassSynonymSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	if len(proteinClassSynonymBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), proteinClassSynonymPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"protein_class_synonyms\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, proteinClassSynonymPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from proteinClassSynonym slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for protein_class_synonyms")
	}

	if len(proteinClassSynonymAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *ProteinClassSynonym) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindProteinClassSynonym(ctx, exec, o.ProtclasssynID)
	if err != nil {
		return err
	}

	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *ProteinClassSynonymSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := ProteinClassSynonymSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), proteinClassSynonymPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"protein_class_synonyms\".* FROM \"protein_class_synonyms\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, proteinClassSynonymPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in ProteinClassSynonymSlice")
	}

	*o = slice

	return nil
}

// ProteinClassSynonymExists checks if the ProteinClassSynonym row exists.
func ProteinClassSynonymExists(ctx context.Context, exec boil.ContextExecutor, protclasssynID int64) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"protein_class_synonyms\" where \"protclasssyn_id\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, protclasssynID)
	}
	row := exec.QueryRowContext(ctx, sql, protclasssynID)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if protein_class_synonyms exists")
	}

	return exists, nil
}
diff --git a/models/protein_classification.go b/models/protein_classification.go
new file mode 100644
index 0000000..25f93de
--- /dev/null
+++ b/models/protein_classification.go
@@ -0,0 +1,1276 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// ProteinClassification is an object representing the database table.
+type ProteinClassification struct { + ProteinClassID int64 `boil:"protein_class_id" json:"protein_class_id" toml:"protein_class_id" yaml:"protein_class_id"` + ParentID null.Int64 `boil:"parent_id" json:"parent_id,omitempty" toml:"parent_id" yaml:"parent_id,omitempty"` + PrefName null.String `boil:"pref_name" json:"pref_name,omitempty" toml:"pref_name" yaml:"pref_name,omitempty"` + ShortName null.String `boil:"short_name" json:"short_name,omitempty" toml:"short_name" yaml:"short_name,omitempty"` + ProteinClassDesc string `boil:"protein_class_desc" json:"protein_class_desc" toml:"protein_class_desc" yaml:"protein_class_desc"` + Definition null.String `boil:"definition" json:"definition,omitempty" toml:"definition" yaml:"definition,omitempty"` + ClassLevel int64 `boil:"class_level" json:"class_level" toml:"class_level" yaml:"class_level"` + + R *proteinClassificationR `boil:"-" json:"-" toml:"-" yaml:"-"` + L proteinClassificationL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ProteinClassificationColumns = struct { + ProteinClassID string + ParentID string + PrefName string + ShortName string + ProteinClassDesc string + Definition string + ClassLevel string +}{ + ProteinClassID: "protein_class_id", + ParentID: "parent_id", + PrefName: "pref_name", + ShortName: "short_name", + ProteinClassDesc: "protein_class_desc", + Definition: "definition", + ClassLevel: "class_level", +} + +var ProteinClassificationTableColumns = struct { + ProteinClassID string + ParentID string + PrefName string + ShortName string + ProteinClassDesc string + Definition string + ClassLevel string +}{ + ProteinClassID: "protein_classification.protein_class_id", + ParentID: "protein_classification.parent_id", + PrefName: "protein_classification.pref_name", + ShortName: "protein_classification.short_name", + ProteinClassDesc: "protein_classification.protein_class_desc", + Definition: "protein_classification.definition", + ClassLevel: "protein_classification.class_level", +} + +// Generated where 
+ +var ProteinClassificationWhere = struct { + ProteinClassID whereHelperint64 + ParentID whereHelpernull_Int64 + PrefName whereHelpernull_String + ShortName whereHelpernull_String + ProteinClassDesc whereHelperstring + Definition whereHelpernull_String + ClassLevel whereHelperint64 +}{ + ProteinClassID: whereHelperint64{field: "\"protein_classification\".\"protein_class_id\""}, + ParentID: whereHelpernull_Int64{field: "\"protein_classification\".\"parent_id\""}, + PrefName: whereHelpernull_String{field: "\"protein_classification\".\"pref_name\""}, + ShortName: whereHelpernull_String{field: "\"protein_classification\".\"short_name\""}, + ProteinClassDesc: whereHelperstring{field: "\"protein_classification\".\"protein_class_desc\""}, + Definition: whereHelpernull_String{field: "\"protein_classification\".\"definition\""}, + ClassLevel: whereHelperint64{field: "\"protein_classification\".\"class_level\""}, +} + +// ProteinClassificationRels is where relationship names are stored. +var ProteinClassificationRels = struct { + ProteinClassComponentClasses string + ProteinClassProteinClassSynonyms string +}{ + ProteinClassComponentClasses: "ProteinClassComponentClasses", + ProteinClassProteinClassSynonyms: "ProteinClassProteinClassSynonyms", +} + +// proteinClassificationR is where relationships are stored. 
// proteinClassificationR is where relationships are stored.
type proteinClassificationR struct {
	ProteinClassComponentClasses     ComponentClassSlice      `boil:"ProteinClassComponentClasses" json:"ProteinClassComponentClasses" toml:"ProteinClassComponentClasses" yaml:"ProteinClassComponentClasses"`
	ProteinClassProteinClassSynonyms ProteinClassSynonymSlice `boil:"ProteinClassProteinClassSynonyms" json:"ProteinClassProteinClassSynonyms" toml:"ProteinClassProteinClassSynonyms" yaml:"ProteinClassProteinClassSynonyms"`
}

// NewStruct creates a new relationship struct
func (*proteinClassificationR) NewStruct() *proteinClassificationR {
	return &proteinClassificationR{}
}

// GetProteinClassComponentClasses returns the cached relation, or nil on a nil receiver.
func (r *proteinClassificationR) GetProteinClassComponentClasses() ComponentClassSlice {
	if r == nil {
		return nil
	}
	return r.ProteinClassComponentClasses
}

// GetProteinClassProteinClassSynonyms returns the cached relation, or nil on a nil receiver.
func (r *proteinClassificationR) GetProteinClassProteinClassSynonyms() ProteinClassSynonymSlice {
	if r == nil {
		return nil
	}
	return r.ProteinClassProteinClassSynonyms
}

// proteinClassificationL is where Load methods for each relationship are stored.
type proteinClassificationL struct{}

var (
	proteinClassificationAllColumns            = []string{"protein_class_id", "parent_id", "pref_name", "short_name", "protein_class_desc", "definition", "class_level"}
	proteinClassificationColumnsWithoutDefault = []string{"protein_class_id", "protein_class_desc", "class_level"}
	proteinClassificationColumnsWithDefault    = []string{"parent_id", "pref_name", "short_name", "definition"}
	proteinClassificationPrimaryKeyColumns     = []string{"protein_class_id"}
	proteinClassificationGeneratedColumns      = []string{}
)

type (
	// ProteinClassificationSlice is an alias for a slice of pointers to ProteinClassification.
	// This should almost always be used instead of []ProteinClassification.
	ProteinClassificationSlice []*ProteinClassification
	// ProteinClassificationHook is the signature for custom ProteinClassification hook methods
	ProteinClassificationHook func(context.Context, boil.ContextExecutor, *ProteinClassification) error

	proteinClassificationQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	proteinClassificationType                 = reflect.TypeOf(&ProteinClassification{})
	proteinClassificationMapping              = queries.MakeStructMapping(proteinClassificationType)
	proteinClassificationPrimaryKeyMapping, _ = queries.BindMapping(proteinClassificationType, proteinClassificationMapping, proteinClassificationPrimaryKeyColumns)
	proteinClassificationInsertCacheMut       sync.RWMutex
	proteinClassificationInsertCache          = make(map[string]insertCache)
	proteinClassificationUpdateCacheMut       sync.RWMutex
	proteinClassificationUpdateCache          = make(map[string]updateCache)
	proteinClassificationUpsertCacheMut       sync.RWMutex
	proteinClassificationUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Registered hook slices, one per hook point; populated via AddProteinClassificationHook.
var proteinClassificationAfterSelectHooks []ProteinClassificationHook

var proteinClassificationBeforeInsertHooks []ProteinClassificationHook
var proteinClassificationAfterInsertHooks []ProteinClassificationHook

var proteinClassificationBeforeUpdateHooks []ProteinClassificationHook
var proteinClassificationAfterUpdateHooks []ProteinClassificationHook

var proteinClassificationBeforeDeleteHooks []ProteinClassificationHook
var proteinClassificationAfterDeleteHooks []ProteinClassificationHook

var proteinClassificationBeforeUpsertHooks []ProteinClassificationHook
var proteinClassificationAfterUpsertHooks []ProteinClassificationHook

// doAfterSelectHooks executes all "after Select" hooks.
+func (o *ProteinClassification) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassificationAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *ProteinClassification) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassificationBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *ProteinClassification) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassificationAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *ProteinClassification) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassificationBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ProteinClassification) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassificationAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *ProteinClassification) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassificationBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *ProteinClassification) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassificationAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ProteinClassification) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassificationBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *ProteinClassification) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range proteinClassificationAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddProteinClassificationHook registers your hook function for all future operations. 
+func AddProteinClassificationHook(hookPoint boil.HookPoint, proteinClassificationHook ProteinClassificationHook) { + switch hookPoint { + case boil.AfterSelectHook: + proteinClassificationAfterSelectHooks = append(proteinClassificationAfterSelectHooks, proteinClassificationHook) + case boil.BeforeInsertHook: + proteinClassificationBeforeInsertHooks = append(proteinClassificationBeforeInsertHooks, proteinClassificationHook) + case boil.AfterInsertHook: + proteinClassificationAfterInsertHooks = append(proteinClassificationAfterInsertHooks, proteinClassificationHook) + case boil.BeforeUpdateHook: + proteinClassificationBeforeUpdateHooks = append(proteinClassificationBeforeUpdateHooks, proteinClassificationHook) + case boil.AfterUpdateHook: + proteinClassificationAfterUpdateHooks = append(proteinClassificationAfterUpdateHooks, proteinClassificationHook) + case boil.BeforeDeleteHook: + proteinClassificationBeforeDeleteHooks = append(proteinClassificationBeforeDeleteHooks, proteinClassificationHook) + case boil.AfterDeleteHook: + proteinClassificationAfterDeleteHooks = append(proteinClassificationAfterDeleteHooks, proteinClassificationHook) + case boil.BeforeUpsertHook: + proteinClassificationBeforeUpsertHooks = append(proteinClassificationBeforeUpsertHooks, proteinClassificationHook) + case boil.AfterUpsertHook: + proteinClassificationAfterUpsertHooks = append(proteinClassificationAfterUpsertHooks, proteinClassificationHook) + } +} + +// One returns a single proteinClassification record from the query. 
+func (q proteinClassificationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ProteinClassification, error) { + o := &ProteinClassification{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for protein_classification") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ProteinClassification records from the query. +func (q proteinClassificationQuery) All(ctx context.Context, exec boil.ContextExecutor) (ProteinClassificationSlice, error) { + var o []*ProteinClassification + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ProteinClassification slice") + } + + if len(proteinClassificationAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ProteinClassification records in the query. +func (q proteinClassificationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count protein_classification rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. 
+func (q proteinClassificationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if protein_classification exists") + } + + return count > 0, nil +} + +// ProteinClassComponentClasses retrieves all the component_class's ComponentClasses with an executor via protein_class_id column. +func (o *ProteinClassification) ProteinClassComponentClasses(mods ...qm.QueryMod) componentClassQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"component_class\".\"protein_class_id\"=?", o.ProteinClassID), + ) + + return ComponentClasses(queryMods...) +} + +// ProteinClassProteinClassSynonyms retrieves all the protein_class_synonym's ProteinClassSynonyms with an executor via protein_class_id column. +func (o *ProteinClassification) ProteinClassProteinClassSynonyms(mods ...qm.QueryMod) proteinClassSynonymQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"protein_class_synonyms\".\"protein_class_id\"=?", o.ProteinClassID), + ) + + return ProteinClassSynonyms(queryMods...) +} + +// LoadProteinClassComponentClasses allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
// LoadProteinClassComponentClasses eager-loads the related ComponentClass rows
// for either one ProteinClassification (singular) or a slice of them, caching
// results into each object's R struct. maybeProteinClassification must be a
// *ProteinClassification or *[]*ProteinClassification, per the singular flag.
func (proteinClassificationL) LoadProteinClassComponentClasses(ctx context.Context, e boil.ContextExecutor, singular bool, maybeProteinClassification interface{}, mods queries.Applicator) error {
	var slice []*ProteinClassification
	var object *ProteinClassification

	if singular {
		object = maybeProteinClassification.(*ProteinClassification)
	} else {
		slice = *maybeProteinClassification.(*[]*ProteinClassification)
	}

	// Collect the distinct primary-key values to query against.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &proteinClassificationR{}
		}
		args = append(args, object.ProteinClassID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &proteinClassificationR{}
			}

			// Skip duplicate IDs (linear scan; arg counts are expected to be small).
			for _, a := range args {
				if a == obj.ProteinClassID {
					continue Outer
				}
			}

			args = append(args, obj.ProteinClassID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`component_class`),
		qm.WhereIn(`component_class.protein_class_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load component_class")
	}

	var resultSlice []*ComponentClass
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice component_class")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on component_class")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for component_class")
	}

	if len(componentClassAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.ProteinClassComponentClasses = resultSlice
		// Set the back-reference on each loaded child.
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &componentClassR{}
			}
			foreign.R.ProteinClass = object
		}
		return nil
	}

	// Match each loaded child back to its parent by FK value.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.ProteinClassID == foreign.ProteinClassID {
				local.R.ProteinClassComponentClasses = append(local.R.ProteinClassComponentClasses, foreign)
				if foreign.R == nil {
					foreign.R = &componentClassR{}
				}
				foreign.R.ProteinClass = local
				break
			}
		}
	}

	return nil
}

// LoadProteinClassProteinClassSynonyms allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (proteinClassificationL) LoadProteinClassProteinClassSynonyms(ctx context.Context, e boil.ContextExecutor, singular bool, maybeProteinClassification interface{}, mods queries.Applicator) error {
	var slice []*ProteinClassification
	var object *ProteinClassification

	if singular {
		object = maybeProteinClassification.(*ProteinClassification)
	} else {
		slice = *maybeProteinClassification.(*[]*ProteinClassification)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &proteinClassificationR{}
		}
		args = append(args, object.ProteinClassID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &proteinClassificationR{}
			}

			for _, a := range args {
				if a == obj.ProteinClassID {
					continue Outer
				}
			}

			args = append(args, obj.ProteinClassID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`protein_class_synonyms`),
		qm.WhereIn(`protein_class_synonyms.protein_class_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load protein_class_synonyms")
	}

	var resultSlice []*ProteinClassSynonym
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice protein_class_synonyms")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on protein_class_synonyms")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for protein_class_synonyms")
	}

	if len(proteinClassSynonymAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.ProteinClassProteinClassSynonyms = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &proteinClassSynonymR{}
			}
			foreign.R.ProteinClass = object
		}
		return nil
	}

	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.ProteinClassID == foreign.ProteinClassID {
				local.R.ProteinClassProteinClassSynonyms = append(local.R.ProteinClassProteinClassSynonyms, foreign)
				if foreign.R == nil {
					foreign.R = &proteinClassSynonymR{}
				}
				foreign.R.ProteinClass = local
				break
			}
		}
	}

	return nil
}

// AddProteinClassComponentClasses adds the given related objects to the existing relationships
// of the protein_classification, optionally inserting them as new records.
// Appends related to o.R.ProteinClassComponentClasses.
// Sets related.R.ProteinClass appropriately.
+func (o *ProteinClassification) AddProteinClassComponentClasses(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ComponentClass) error { + var err error + for _, rel := range related { + if insert { + rel.ProteinClassID = o.ProteinClassID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"component_class\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"protein_class_id"}), + strmangle.WhereClause("\"", "\"", 0, componentClassPrimaryKeyColumns), + ) + values := []interface{}{o.ProteinClassID, rel.CompClassID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.ProteinClassID = o.ProteinClassID + } + } + + if o.R == nil { + o.R = &proteinClassificationR{ + ProteinClassComponentClasses: related, + } + } else { + o.R.ProteinClassComponentClasses = append(o.R.ProteinClassComponentClasses, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &componentClassR{ + ProteinClass: o, + } + } else { + rel.R.ProteinClass = o + } + } + return nil +} + +// AddProteinClassProteinClassSynonyms adds the given related objects to the existing relationships +// of the protein_classification, optionally inserting them as new records. +// Appends related to o.R.ProteinClassProteinClassSynonyms. +// Sets related.R.ProteinClass appropriately. 
+func (o *ProteinClassification) AddProteinClassProteinClassSynonyms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ProteinClassSynonym) error { + var err error + for _, rel := range related { + if insert { + rel.ProteinClassID = o.ProteinClassID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"protein_class_synonyms\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"protein_class_id"}), + strmangle.WhereClause("\"", "\"", 0, proteinClassSynonymPrimaryKeyColumns), + ) + values := []interface{}{o.ProteinClassID, rel.ProtclasssynID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.ProteinClassID = o.ProteinClassID + } + } + + if o.R == nil { + o.R = &proteinClassificationR{ + ProteinClassProteinClassSynonyms: related, + } + } else { + o.R.ProteinClassProteinClassSynonyms = append(o.R.ProteinClassProteinClassSynonyms, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &proteinClassSynonymR{ + ProteinClass: o, + } + } else { + rel.R.ProteinClass = o + } + } + return nil +} + +// ProteinClassifications retrieves all the records using an executor. +func ProteinClassifications(mods ...qm.QueryMod) proteinClassificationQuery { + mods = append(mods, qm.From("\"protein_classification\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"protein_classification\".*"}) + } + + return proteinClassificationQuery{q} +} + +// FindProteinClassification retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindProteinClassification(ctx context.Context, exec boil.ContextExecutor, proteinClassID int64, selectCols ...string) (*ProteinClassification, error) { + proteinClassificationObj := &ProteinClassification{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"protein_classification\" where \"protein_class_id\"=?", sel, + ) + + q := queries.Raw(query, proteinClassID) + + err := q.Bind(ctx, exec, proteinClassificationObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from protein_classification") + } + + if err = proteinClassificationObj.doAfterSelectHooks(ctx, exec); err != nil { + return proteinClassificationObj, err + } + + return proteinClassificationObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
// Insert inserts o using an executor. The generated INSERT statement is built
// once per (column set, non-zero defaults) combination and memoized in
// proteinClassificationInsertCache under an RWMutex.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *ProteinClassification) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no protein_classification provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	// Columns with defaults that hold non-zero values must be sent explicitly.
	nzDefaults := queries.NonZeroDefaultSet(proteinClassificationColumnsWithDefault, o)

	key := makeCacheKey(columns, nzDefaults)
	proteinClassificationInsertCacheMut.RLock()
	cache, cached := proteinClassificationInsertCache[key]
	proteinClassificationInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			proteinClassificationAllColumns,
			proteinClassificationColumnsWithDefault,
			proteinClassificationColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(proteinClassificationType, proteinClassificationMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(proteinClassificationType, proteinClassificationMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"protein_classification\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"protein_classification\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	// When RETURNING columns exist, scan them back into o.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into protein_classification")
	}

	if !cached {
		proteinClassificationInsertCacheMut.Lock()
		proteinClassificationInsertCache[key] = cache
		proteinClassificationInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the ProteinClassification.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *ProteinClassification) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	proteinClassificationUpdateCacheMut.RLock()
	cache, cached := proteinClassificationUpdateCache[key]
	proteinClassificationUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			proteinClassificationAllColumns,
			proteinClassificationPrimaryKeyColumns,
		)

		// created_at is never updated unless the caller explicitly whitelists it.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update protein_classification, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"protein_classification\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, proteinClassificationPrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(proteinClassificationType, proteinClassificationMapping, append(wl, proteinClassificationPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update protein_classification row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for protein_classification")
	}

	if !cached {
		proteinClassificationUpdateCacheMut.Lock()
		proteinClassificationUpdateCache[key] = cache
		proteinClassificationUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q proteinClassificationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for protein_classification")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for protein_classification")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
+func (o ProteinClassificationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), proteinClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"protein_classification\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, proteinClassificationPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in proteinClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all proteinClassification") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. 
// Upsert attempts an insert and, on conflict with conflictColumns (defaulting
// to the primary key), either updates updateColumns (updateOnConflict true)
// or ignores the conflict. The built statement is memoized per option
// combination in proteinClassificationUpsertCache.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *ProteinClassification) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no protein_classification provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(proteinClassificationColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	proteinClassificationUpsertCacheMut.RLock()
	cache, cached := proteinClassificationUpsertCache[key]
	proteinClassificationUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			proteinClassificationAllColumns,
			proteinClassificationColumnsWithDefault,
			proteinClassificationColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			proteinClassificationAllColumns,
			proteinClassificationPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert protein_classification, could not build update column list")
		}

		// Fall back to the primary key when no conflict target was given.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(proteinClassificationPrimaryKeyColumns))
			copy(conflict, proteinClassificationPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"protein_classification\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(proteinClassificationType, proteinClassificationMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(proteinClassificationType, proteinClassificationMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert protein_classification")
	}

	if !cached {
		proteinClassificationUpsertCacheMut.Lock()
		proteinClassificationUpsertCache[key] = cache
		proteinClassificationUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single ProteinClassification record with an executor.
// Delete will match against the primary key column to find the record to delete.
+func (o *ProteinClassification) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ProteinClassification provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), proteinClassificationPrimaryKeyMapping) + sql := "DELETE FROM \"protein_classification\" WHERE \"protein_class_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from protein_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for protein_classification") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q proteinClassificationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no proteinClassificationQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from protein_classification") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for protein_classification") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o ProteinClassificationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(proteinClassificationBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), proteinClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"protein_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, proteinClassificationPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from proteinClassification slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for protein_classification") + } + + if len(proteinClassificationAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ProteinClassification) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindProteinClassification(ctx, exec, o.ProteinClassID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
func (o *ProteinClassificationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := ProteinClassificationSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), proteinClassificationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"protein_classification\".* FROM \"protein_classification\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, proteinClassificationPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in ProteinClassificationSlice")
	}

	// NOTE(review): the DB result order replaces the slice; ordering may
	// differ from the original slice — confirm callers do not rely on order.
	*o = slice

	return nil
}

// ProteinClassificationExists checks if the ProteinClassification row exists.
func ProteinClassificationExists(ctx context.Context, exec boil.ContextExecutor, proteinClassID int64) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"protein_classification\" where \"protein_class_id\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, proteinClassID)
	}
	row := exec.QueryRowContext(ctx, sql, proteinClassID)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if protein_classification exists")
	}

	return exists, nil
}
diff --git a/models/protein_family_classification.go b/models/protein_family_classification.go
new file mode 100644
index 0000000..54c4aa7
--- /dev/null
+++ b/models/protein_family_classification.go
@@ -0,0 +1,946 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// ProteinFamilyClassification is an object representing the database table.
// L2-L8 are nullable classification levels; L1 and the description are required.
type ProteinFamilyClassification struct {
	ProteinClassID   int64       `boil:"protein_class_id" json:"protein_class_id" toml:"protein_class_id" yaml:"protein_class_id"`
	ProteinClassDesc string      `boil:"protein_class_desc" json:"protein_class_desc" toml:"protein_class_desc" yaml:"protein_class_desc"`
	L1               string      `boil:"l1" json:"l1" toml:"l1" yaml:"l1"`
	L2               null.String `boil:"l2" json:"l2,omitempty" toml:"l2" yaml:"l2,omitempty"`
	L3               null.String `boil:"l3" json:"l3,omitempty" toml:"l3" yaml:"l3,omitempty"`
	L4               null.String `boil:"l4" json:"l4,omitempty" toml:"l4" yaml:"l4,omitempty"`
	L5               null.String `boil:"l5" json:"l5,omitempty" toml:"l5" yaml:"l5,omitempty"`
	L6               null.String `boil:"l6" json:"l6,omitempty" toml:"l6" yaml:"l6,omitempty"`
	L7               null.String `boil:"l7" json:"l7,omitempty" toml:"l7" yaml:"l7,omitempty"`
	L8               null.String `boil:"l8" json:"l8,omitempty" toml:"l8" yaml:"l8,omitempty"`

	R *proteinFamilyClassificationR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L proteinFamilyClassificationL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// ProteinFamilyClassificationColumns holds the bare column names.
var ProteinFamilyClassificationColumns = struct {
	ProteinClassID   string
	ProteinClassDesc string
	L1               string
	L2               string
	L3               string
	L4               string
	L5               string
	L6               string
	L7               string
	L8               string
}{
	ProteinClassID:   "protein_class_id",
	ProteinClassDesc: "protein_class_desc",
	L1:               "l1",
	L2:               "l2",
	L3:               "l3",
	L4:               "l4",
	L5:               "l5",
	L6:               "l6",
	L7:               "l7",
	L8:               "l8",
}

// ProteinFamilyClassificationTableColumns holds table-qualified column names.
var ProteinFamilyClassificationTableColumns = struct {
	ProteinClassID   string
	ProteinClassDesc string
	L1               string
	L2               string
	L3               string
	L4               string
	L5               string
	L6               string
	L7               string
	L8               string
}{
	ProteinClassID:   "protein_family_classification.protein_class_id",
	ProteinClassDesc: "protein_family_classification.protein_class_desc",
	L1:               "protein_family_classification.l1",
	L2:               "protein_family_classification.l2",
	L3:               "protein_family_classification.l3",
	L4:               "protein_family_classification.l4",
	L5:               "protein_family_classification.l5",
	L6:               "protein_family_classification.l6",
	L7:               "protein_family_classification.l7",
	L8:               "protein_family_classification.l8",
}

// Generated where

var ProteinFamilyClassificationWhere = struct {
	ProteinClassID   whereHelperint64
	ProteinClassDesc whereHelperstring
	L1               whereHelperstring
	L2               whereHelpernull_String
	L3               whereHelpernull_String
	L4               whereHelpernull_String
	L5               whereHelpernull_String
	L6               whereHelpernull_String
	L7               whereHelpernull_String
	L8               whereHelpernull_String
}{
	ProteinClassID:   whereHelperint64{field: "\"protein_family_classification\".\"protein_class_id\""},
	ProteinClassDesc: whereHelperstring{field: "\"protein_family_classification\".\"protein_class_desc\""},
	L1:               whereHelperstring{field: "\"protein_family_classification\".\"l1\""},
	L2:               whereHelpernull_String{field: "\"protein_family_classification\".\"l2\""},
	L3:               whereHelpernull_String{field: "\"protein_family_classification\".\"l3\""},
	L4:               whereHelpernull_String{field: "\"protein_family_classification\".\"l4\""},
	L5:               whereHelpernull_String{field: "\"protein_family_classification\".\"l5\""},
	L6:               whereHelpernull_String{field: "\"protein_family_classification\".\"l6\""},
	L7:               whereHelpernull_String{field: "\"protein_family_classification\".\"l7\""},
	L8:               whereHelpernull_String{field: "\"protein_family_classification\".\"l8\""},
}

// ProteinFamilyClassificationRels is where relationship names are stored.
var ProteinFamilyClassificationRels = struct {
}{}

// proteinFamilyClassificationR is where relationships are stored.
// The table has no generated relationships, so this struct is empty.
type proteinFamilyClassificationR struct {
}

// NewStruct creates a new relationship struct
func (*proteinFamilyClassificationR) NewStruct() *proteinFamilyClassificationR {
	return &proteinFamilyClassificationR{}
}

// proteinFamilyClassificationL is where Load methods for each relationship are stored.
type proteinFamilyClassificationL struct{}

var (
	proteinFamilyClassificationAllColumns            = []string{"protein_class_id", "protein_class_desc", "l1", "l2", "l3", "l4", "l5", "l6", "l7", "l8"}
	proteinFamilyClassificationColumnsWithoutDefault = []string{"protein_class_id", "protein_class_desc", "l1"}
	proteinFamilyClassificationColumnsWithDefault    = []string{"l2", "l3", "l4", "l5", "l6", "l7", "l8"}
	proteinFamilyClassificationPrimaryKeyColumns     = []string{"protein_class_id"}
	proteinFamilyClassificationGeneratedColumns      = []string{}
)

type (
	// ProteinFamilyClassificationSlice is an alias for a slice of pointers to ProteinFamilyClassification.
	// This should almost always be used instead of []ProteinFamilyClassification.
	ProteinFamilyClassificationSlice []*ProteinFamilyClassification
	// ProteinFamilyClassificationHook is the signature for custom ProteinFamilyClassification hook methods
	ProteinFamilyClassificationHook func(context.Context, boil.ContextExecutor, *ProteinFamilyClassification) error

	proteinFamilyClassificationQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	proteinFamilyClassificationType                 = reflect.TypeOf(&ProteinFamilyClassification{})
	proteinFamilyClassificationMapping              = queries.MakeStructMapping(proteinFamilyClassificationType)
	proteinFamilyClassificationPrimaryKeyMapping, _ = queries.BindMapping(proteinFamilyClassificationType, proteinFamilyClassificationMapping, proteinFamilyClassificationPrimaryKeyColumns)
	proteinFamilyClassificationInsertCacheMut       sync.RWMutex
	proteinFamilyClassificationInsertCache          = make(map[string]insertCache)
	proteinFamilyClassificationUpdateCacheMut       sync.RWMutex
	proteinFamilyClassificationUpdateCache          = make(map[string]updateCache)
	proteinFamilyClassificationUpsertCacheMut       sync.RWMutex
	proteinFamilyClassificationUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Hook registries; appended to by AddProteinFamilyClassificationHook.
var proteinFamilyClassificationAfterSelectHooks []ProteinFamilyClassificationHook

var proteinFamilyClassificationBeforeInsertHooks []ProteinFamilyClassificationHook
var proteinFamilyClassificationAfterInsertHooks []ProteinFamilyClassificationHook

var proteinFamilyClassificationBeforeUpdateHooks []ProteinFamilyClassificationHook
var proteinFamilyClassificationAfterUpdateHooks []ProteinFamilyClassificationHook

var proteinFamilyClassificationBeforeDeleteHooks []ProteinFamilyClassificationHook
var proteinFamilyClassificationAfterDeleteHooks []ProteinFamilyClassificationHook

var proteinFamilyClassificationBeforeUpsertHooks []ProteinFamilyClassificationHook
var proteinFamilyClassificationAfterUpsertHooks []ProteinFamilyClassificationHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *ProteinFamilyClassification) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range proteinFamilyClassificationAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *ProteinFamilyClassification) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range proteinFamilyClassificationBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *ProteinFamilyClassification) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range proteinFamilyClassificationAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *ProteinFamilyClassification) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range proteinFamilyClassificationBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *ProteinFamilyClassification) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range proteinFamilyClassificationAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *ProteinFamilyClassification) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range proteinFamilyClassificationBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *ProteinFamilyClassification) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range proteinFamilyClassificationAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *ProteinFamilyClassification) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range proteinFamilyClassificationBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *ProteinFamilyClassification) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range proteinFamilyClassificationAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddProteinFamilyClassificationHook registers your hook function for all future operations.
// NOTE(review): registration appends to package-level slices without locking —
// presumably intended for init-time registration only; confirm before calling concurrently.
func AddProteinFamilyClassificationHook(hookPoint boil.HookPoint, proteinFamilyClassificationHook ProteinFamilyClassificationHook) {
	switch hookPoint {
	case boil.AfterSelectHook:
		proteinFamilyClassificationAfterSelectHooks = append(proteinFamilyClassificationAfterSelectHooks, proteinFamilyClassificationHook)
	case boil.BeforeInsertHook:
		proteinFamilyClassificationBeforeInsertHooks = append(proteinFamilyClassificationBeforeInsertHooks, proteinFamilyClassificationHook)
	case boil.AfterInsertHook:
		proteinFamilyClassificationAfterInsertHooks = append(proteinFamilyClassificationAfterInsertHooks, proteinFamilyClassificationHook)
	case boil.BeforeUpdateHook:
		proteinFamilyClassificationBeforeUpdateHooks = append(proteinFamilyClassificationBeforeUpdateHooks, proteinFamilyClassificationHook)
	case boil.AfterUpdateHook:
		proteinFamilyClassificationAfterUpdateHooks = append(proteinFamilyClassificationAfterUpdateHooks, proteinFamilyClassificationHook)
	case boil.BeforeDeleteHook:
		proteinFamilyClassificationBeforeDeleteHooks = append(proteinFamilyClassificationBeforeDeleteHooks, proteinFamilyClassificationHook)
	case boil.AfterDeleteHook:
		proteinFamilyClassificationAfterDeleteHooks = append(proteinFamilyClassificationAfterDeleteHooks, proteinFamilyClassificationHook)
	case boil.BeforeUpsertHook:
		proteinFamilyClassificationBeforeUpsertHooks = append(proteinFamilyClassificationBeforeUpsertHooks, proteinFamilyClassificationHook)
	case boil.AfterUpsertHook:
		proteinFamilyClassificationAfterUpsertHooks = append(proteinFamilyClassificationAfterUpsertHooks, proteinFamilyClassificationHook)
	}
}

// One returns a single proteinFamilyClassification record from the query.
func (q proteinFamilyClassificationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ProteinFamilyClassification, error) {
	o := &ProteinFamilyClassification{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			// Return the bare sentinel so callers can errors.Is against sql.ErrNoRows.
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for protein_family_classification")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all ProteinFamilyClassification records from the query.
func (q proteinFamilyClassificationQuery) All(ctx context.Context, exec boil.ContextExecutor) (ProteinFamilyClassificationSlice, error) {
	var o []*ProteinFamilyClassification

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to ProteinFamilyClassification slice")
	}

	if len(proteinFamilyClassificationAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all ProteinFamilyClassification records in the query.
func (q proteinFamilyClassificationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Replace the select list with COUNT(*) on the built query.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count protein_family_classification rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q proteinFamilyClassificationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	// LIMIT 1 lets the database stop at the first match.
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if protein_family_classification exists")
	}

	return count > 0, nil
}

// ProteinFamilyClassifications retrieves all the records using an executor.
func ProteinFamilyClassifications(mods ...qm.QueryMod) proteinFamilyClassificationQuery {
	mods = append(mods, qm.From("\"protein_family_classification\""))
	q := NewQuery(mods...)
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"protein_family_classification\".*"})
	}

	return proteinFamilyClassificationQuery{q}
}

// FindProteinFamilyClassification retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindProteinFamilyClassification(ctx context.Context, exec boil.ContextExecutor, proteinClassID int64, selectCols ...string) (*ProteinFamilyClassification, error) {
	proteinFamilyClassificationObj := &ProteinFamilyClassification{}

	sel := "*"
	if len(selectCols) > 0 {
		// Quote each requested column with the dialect's identifier quotes.
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	// NOTE: `?` placeholder — this file is generated for a non-indexed-placeholder
	// dialect (the Upsert below uses the SQLite query builder).
	query := fmt.Sprintf(
		"select %s from \"protein_family_classification\" where \"protein_class_id\"=?", sel,
	)

	q := queries.Raw(query, proteinClassID)

	err := q.Bind(ctx, exec, proteinFamilyClassificationObj)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from protein_family_classification")
	}

	if err = proteinFamilyClassificationObj.doAfterSelectHooks(ctx, exec); err != nil {
		return proteinFamilyClassificationObj, err
	}

	return proteinFamilyClassificationObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *ProteinFamilyClassification) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no protein_family_classification provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(proteinFamilyClassificationColumnsWithDefault, o)

	// Built statements are cached per (columns, non-zero defaults) key.
	key := makeCacheKey(columns, nzDefaults)
	proteinFamilyClassificationInsertCacheMut.RLock()
	cache, cached := proteinFamilyClassificationInsertCache[key]
	proteinFamilyClassificationInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			proteinFamilyClassificationAllColumns,
			proteinFamilyClassificationColumnsWithDefault,
			proteinFamilyClassificationColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(proteinFamilyClassificationType, proteinFamilyClassificationMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(proteinFamilyClassificationType, proteinFamilyClassificationMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"protein_family_classification\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"protein_family_classification\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	if len(cache.retMapping) != 0 {
		// RETURNING path: scan database-assigned defaults back into the struct.
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into protein_family_classification")
	}

	if !cached {
		proteinFamilyClassificationInsertCacheMut.Lock()
		proteinFamilyClassificationInsertCache[key] = cache
		proteinFamilyClassificationInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the ProteinFamilyClassification.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *ProteinFamilyClassification) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	proteinFamilyClassificationUpdateCacheMut.RLock()
	cache, cached := proteinFamilyClassificationUpdateCache[key]
	proteinFamilyClassificationUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			proteinFamilyClassificationAllColumns,
			proteinFamilyClassificationPrimaryKeyColumns,
		)

		if !columns.IsWhitelist() {
			// created_at is never updated implicitly; only an explicit whitelist may include it.
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update protein_family_classification, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"protein_family_classification\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, proteinFamilyClassificationPrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(proteinFamilyClassificationType, proteinFamilyClassificationMapping, append(wl, proteinFamilyClassificationPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update protein_family_classification row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for protein_family_classification")
	}

	if !cached {
		proteinFamilyClassificationUpdateCacheMut.Lock()
		proteinFamilyClassificationUpdateCache[key] = cache
		proteinFamilyClassificationUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q proteinFamilyClassificationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for protein_family_classification")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for protein_family_classification")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o ProteinFamilyClassificationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	// SET arguments come first, in map-iteration order.
	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), proteinFamilyClassificationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"protein_family_classification\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, proteinFamilyClassificationPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in proteinFamilyClassification slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all proteinFamilyClassification")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *ProteinFamilyClassification) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no protein_family_classification provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(proteinFamilyClassificationColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	proteinFamilyClassificationUpsertCacheMut.RLock()
	cache, cached := proteinFamilyClassificationUpsertCache[key]
	proteinFamilyClassificationUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			proteinFamilyClassificationAllColumns,
			proteinFamilyClassificationColumnsWithDefault,
			proteinFamilyClassificationColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			proteinFamilyClassificationAllColumns,
			proteinFamilyClassificationPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert protein_family_classification, could not build update column list")
		}

		// Default conflict target is the primary key.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(proteinFamilyClassificationPrimaryKeyColumns))
			copy(conflict, proteinFamilyClassificationPrimaryKeyColumns)
		}
		// SQLite ON CONFLICT builder — this model file targets the sqlite3 dialect.
		cache.query = buildUpsertQuerySQLite(dialect, "\"protein_family_classification\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(proteinFamilyClassificationType, proteinFamilyClassificationMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(proteinFamilyClassificationType, proteinFamilyClassificationMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert protein_family_classification")
	}

	if !cached {
		proteinFamilyClassificationUpsertCacheMut.Lock()
		proteinFamilyClassificationUpsertCache[key] = cache
		proteinFamilyClassificationUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single ProteinFamilyClassification record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *ProteinFamilyClassification) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no ProteinFamilyClassification provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	// Primary-key values are extracted via the cached reflection mapping.
	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), proteinFamilyClassificationPrimaryKeyMapping)
	sql := "DELETE FROM \"protein_family_classification\" WHERE \"protein_class_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from protein_family_classification")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for protein_family_classification")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q proteinFamilyClassificationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no proteinFamilyClassificationQuery provided for delete all")
	}

	// Flip the built query from SELECT to DELETE; hooks are not run here.
	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from protein_family_classification")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for protein_family_classification")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o ProteinFamilyClassificationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Per-row before-delete hooks, only when any are registered.
	if len(proteinFamilyClassificationBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), proteinFamilyClassificationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// Single DELETE with a repeated primary-key WHERE clause, one group per row.
	sql := "DELETE FROM \"protein_family_classification\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, proteinFamilyClassificationPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from proteinFamilyClassification slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for protein_family_classification")
	}

	if len(proteinFamilyClassificationAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *ProteinFamilyClassification) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindProteinFamilyClassification(ctx, exec, o.ProteinClassID)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place with the freshly fetched row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
+func (o *ProteinFamilyClassificationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ProteinFamilyClassificationSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), proteinFamilyClassificationPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"protein_family_classification\".* FROM \"protein_family_classification\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, proteinFamilyClassificationPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ProteinFamilyClassificationSlice") + } + + *o = slice + + return nil +} + +// ProteinFamilyClassificationExists checks if the ProteinFamilyClassification row exists. +func ProteinFamilyClassificationExists(ctx context.Context, exec boil.ContextExecutor, proteinClassID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"protein_family_classification\" where \"protein_class_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, proteinClassID) + } + row := exec.QueryRowContext(ctx, sql, proteinClassID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if protein_family_classification exists") + } + + return exists, nil +} diff --git a/models/reaction.go b/models/reaction.go new file mode 100644 index 0000000..cb5b72b --- /dev/null +++ b/models/reaction.go @@ -0,0 +1,1414 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Reaction is an object representing the database table. +type Reaction struct { + ID null.Int64 `boil:"id" json:"id,omitempty" toml:"id" yaml:"id,omitempty"` + Directional string `boil:"directional" json:"directional" toml:"directional" yaml:"directional"` + Accession null.String `boil:"accession" json:"accession,omitempty" toml:"accession" yaml:"accession,omitempty"` + Status null.String `boil:"status" json:"status,omitempty" toml:"status" yaml:"status,omitempty"` + Comment null.String `boil:"comment" json:"comment,omitempty" toml:"comment" yaml:"comment,omitempty"` + Equation null.String `boil:"equation" json:"equation,omitempty" toml:"equation" yaml:"equation,omitempty"` + HTMLEquation null.String `boil:"html_equation" json:"html_equation,omitempty" toml:"html_equation" yaml:"html_equation,omitempty"` + IsChemicallyBalanced string `boil:"is_chemically_balanced" json:"is_chemically_balanced" toml:"is_chemically_balanced" yaml:"is_chemically_balanced"` + IsTransport string `boil:"is_transport" json:"is_transport" toml:"is_transport" yaml:"is_transport"` + Ec null.String `boil:"ec" json:"ec,omitempty" toml:"ec" yaml:"ec,omitempty"` + Location null.String `boil:"location" json:"location,omitempty" toml:"location" yaml:"location,omitempty"` + + R *reactionR `boil:"-" json:"-" toml:"-" yaml:"-"` + L reactionL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ReactionColumns = struct { + ID string + Directional string + Accession string + Status string + Comment string + Equation string + HTMLEquation string + IsChemicallyBalanced 
string + IsTransport string + Ec string + Location string +}{ + ID: "id", + Directional: "directional", + Accession: "accession", + Status: "status", + Comment: "comment", + Equation: "equation", + HTMLEquation: "html_equation", + IsChemicallyBalanced: "is_chemically_balanced", + IsTransport: "is_transport", + Ec: "ec", + Location: "location", +} + +var ReactionTableColumns = struct { + ID string + Directional string + Accession string + Status string + Comment string + Equation string + HTMLEquation string + IsChemicallyBalanced string + IsTransport string + Ec string + Location string +}{ + ID: "reaction.id", + Directional: "reaction.directional", + Accession: "reaction.accession", + Status: "reaction.status", + Comment: "reaction.comment", + Equation: "reaction.equation", + HTMLEquation: "reaction.html_equation", + IsChemicallyBalanced: "reaction.is_chemically_balanced", + IsTransport: "reaction.is_transport", + Ec: "reaction.ec", + Location: "reaction.location", +} + +// Generated where + +var ReactionWhere = struct { + ID whereHelpernull_Int64 + Directional whereHelperstring + Accession whereHelpernull_String + Status whereHelpernull_String + Comment whereHelpernull_String + Equation whereHelpernull_String + HTMLEquation whereHelpernull_String + IsChemicallyBalanced whereHelperstring + IsTransport whereHelperstring + Ec whereHelpernull_String + Location whereHelpernull_String +}{ + ID: whereHelpernull_Int64{field: "\"reaction\".\"id\""}, + Directional: whereHelperstring{field: "\"reaction\".\"directional\""}, + Accession: whereHelpernull_String{field: "\"reaction\".\"accession\""}, + Status: whereHelpernull_String{field: "\"reaction\".\"status\""}, + Comment: whereHelpernull_String{field: "\"reaction\".\"comment\""}, + Equation: whereHelpernull_String{field: "\"reaction\".\"equation\""}, + HTMLEquation: whereHelpernull_String{field: "\"reaction\".\"html_equation\""}, + IsChemicallyBalanced: whereHelperstring{field: "\"reaction\".\"is_chemically_balanced\""}, + 
IsTransport: whereHelperstring{field: "\"reaction\".\"is_transport\""}, + Ec: whereHelpernull_String{field: "\"reaction\".\"ec\""}, + Location: whereHelpernull_String{field: "\"reaction\".\"location\""}, +} + +// ReactionRels is where relationship names are stored. +var ReactionRels = struct { + ReactionsideReactions string + Uniprots string +}{ + ReactionsideReactions: "ReactionsideReactions", + Uniprots: "Uniprots", +} + +// reactionR is where relationships are stored. +type reactionR struct { + ReactionsideReactions ReactionsideReactionSlice `boil:"ReactionsideReactions" json:"ReactionsideReactions" toml:"ReactionsideReactions" yaml:"ReactionsideReactions"` + Uniprots UniprotSlice `boil:"Uniprots" json:"Uniprots" toml:"Uniprots" yaml:"Uniprots"` +} + +// NewStruct creates a new relationship struct +func (*reactionR) NewStruct() *reactionR { + return &reactionR{} +} + +func (r *reactionR) GetReactionsideReactions() ReactionsideReactionSlice { + if r == nil { + return nil + } + return r.ReactionsideReactions +} + +func (r *reactionR) GetUniprots() UniprotSlice { + if r == nil { + return nil + } + return r.Uniprots +} + +// reactionL is where Load methods for each relationship are stored. +type reactionL struct{} + +var ( + reactionAllColumns = []string{"id", "directional", "accession", "status", "comment", "equation", "html_equation", "is_chemically_balanced", "is_transport", "ec", "location"} + reactionColumnsWithoutDefault = []string{} + reactionColumnsWithDefault = []string{"id", "directional", "accession", "status", "comment", "equation", "html_equation", "is_chemically_balanced", "is_transport", "ec", "location"} + reactionPrimaryKeyColumns = []string{"accession"} + reactionGeneratedColumns = []string{} +) + +type ( + // ReactionSlice is an alias for a slice of pointers to Reaction. + // This should almost always be used instead of []Reaction. 
+ ReactionSlice []*Reaction + // ReactionHook is the signature for custom Reaction hook methods + ReactionHook func(context.Context, boil.ContextExecutor, *Reaction) error + + reactionQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + reactionType = reflect.TypeOf(&Reaction{}) + reactionMapping = queries.MakeStructMapping(reactionType) + reactionPrimaryKeyMapping, _ = queries.BindMapping(reactionType, reactionMapping, reactionPrimaryKeyColumns) + reactionInsertCacheMut sync.RWMutex + reactionInsertCache = make(map[string]insertCache) + reactionUpdateCacheMut sync.RWMutex + reactionUpdateCache = make(map[string]updateCache) + reactionUpsertCacheMut sync.RWMutex + reactionUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var reactionAfterSelectHooks []ReactionHook + +var reactionBeforeInsertHooks []ReactionHook +var reactionAfterInsertHooks []ReactionHook + +var reactionBeforeUpdateHooks []ReactionHook +var reactionAfterUpdateHooks []ReactionHook + +var reactionBeforeDeleteHooks []ReactionHook +var reactionAfterDeleteHooks []ReactionHook + +var reactionBeforeUpsertHooks []ReactionHook +var reactionAfterUpsertHooks []ReactionHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *Reaction) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. 
+func (o *Reaction) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Reaction) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *Reaction) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Reaction) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Reaction) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. 
+func (o *Reaction) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Reaction) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *Reaction) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddReactionHook registers your hook function for all future operations. 
+func AddReactionHook(hookPoint boil.HookPoint, reactionHook ReactionHook) { + switch hookPoint { + case boil.AfterSelectHook: + reactionAfterSelectHooks = append(reactionAfterSelectHooks, reactionHook) + case boil.BeforeInsertHook: + reactionBeforeInsertHooks = append(reactionBeforeInsertHooks, reactionHook) + case boil.AfterInsertHook: + reactionAfterInsertHooks = append(reactionAfterInsertHooks, reactionHook) + case boil.BeforeUpdateHook: + reactionBeforeUpdateHooks = append(reactionBeforeUpdateHooks, reactionHook) + case boil.AfterUpdateHook: + reactionAfterUpdateHooks = append(reactionAfterUpdateHooks, reactionHook) + case boil.BeforeDeleteHook: + reactionBeforeDeleteHooks = append(reactionBeforeDeleteHooks, reactionHook) + case boil.AfterDeleteHook: + reactionAfterDeleteHooks = append(reactionAfterDeleteHooks, reactionHook) + case boil.BeforeUpsertHook: + reactionBeforeUpsertHooks = append(reactionBeforeUpsertHooks, reactionHook) + case boil.AfterUpsertHook: + reactionAfterUpsertHooks = append(reactionAfterUpsertHooks, reactionHook) + } +} + +// One returns a single reaction record from the query. +func (q reactionQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Reaction, error) { + o := &Reaction{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for reaction") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Reaction records from the query. 
+func (q reactionQuery) All(ctx context.Context, exec boil.ContextExecutor) (ReactionSlice, error) { + var o []*Reaction + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to Reaction slice") + } + + if len(reactionAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Reaction records in the query. +func (q reactionQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count reaction rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q reactionQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if reaction exists") + } + + return count > 0, nil +} + +// ReactionsideReactions retrieves all the reactionside_reaction's ReactionsideReactions with an executor. +func (o *Reaction) ReactionsideReactions(mods ...qm.QueryMod) reactionsideReactionQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"reactionside_reaction\".\"reaction\"=?", o.Accession), + ) + + return ReactionsideReactions(queryMods...) +} + +// Uniprots retrieves all the uniprot's Uniprots with an executor. 
+func (o *Reaction) Uniprots(mods ...qm.QueryMod) uniprotQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.InnerJoin("\"uniprot_to_reaction\" on \"uniprot\".\"accession\" = \"uniprot_to_reaction\".\"uniprot\""), + qm.Where("\"uniprot_to_reaction\".\"reaction\"=?", o.Accession), + ) + + return Uniprots(queryMods...) +} + +// LoadReactionsideReactions allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (reactionL) LoadReactionsideReactions(ctx context.Context, e boil.ContextExecutor, singular bool, maybeReaction interface{}, mods queries.Applicator) error { + var slice []*Reaction + var object *Reaction + + if singular { + object = maybeReaction.(*Reaction) + } else { + slice = *maybeReaction.(*[]*Reaction) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &reactionR{} + } + args = append(args, object.Accession) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &reactionR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Accession) { + continue Outer + } + } + + args = append(args, obj.Accession) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`reactionside_reaction`), + qm.WhereIn(`reactionside_reaction.reaction in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load reactionside_reaction") + } + + var resultSlice []*ReactionsideReaction + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice reactionside_reaction") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on reactionside_reaction") + } + if err = results.Err(); err != nil { + 
return errors.Wrap(err, "error occurred during iteration of eager loaded relations for reactionside_reaction") + } + + if len(reactionsideReactionAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.ReactionsideReactions = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &reactionsideReactionR{} + } + foreign.R.ReactionsideReactionReaction = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.Accession, foreign.Reaction) { + local.R.ReactionsideReactions = append(local.R.ReactionsideReactions, foreign) + if foreign.R == nil { + foreign.R = &reactionsideReactionR{} + } + foreign.R.ReactionsideReactionReaction = local + break + } + } + } + + return nil +} + +// LoadUniprots allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (reactionL) LoadUniprots(ctx context.Context, e boil.ContextExecutor, singular bool, maybeReaction interface{}, mods queries.Applicator) error { + var slice []*Reaction + var object *Reaction + + if singular { + object = maybeReaction.(*Reaction) + } else { + slice = *maybeReaction.(*[]*Reaction) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &reactionR{} + } + args = append(args, object.Accession) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &reactionR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Accession) { + continue Outer + } + } + + args = append(args, obj.Accession) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.Select("\"uniprot\".\"accession\", \"uniprot\".\"database\", \"uniprot\".\"seqhash\", \"a\".\"reaction\""), + qm.From("\"uniprot\""), + qm.InnerJoin("\"uniprot_to_reaction\" as \"a\" on \"uniprot\".\"accession\" = \"a\".\"uniprot\""), + qm.WhereIn("\"a\".\"reaction\" in ?", args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load uniprot") + } + + var resultSlice []*Uniprot + + var localJoinCols []null.String + for results.Next() { + one := new(Uniprot) + var localJoinCol null.String + + err = results.Scan(&one.Accession, &one.Database, &one.Seqhash, &localJoinCol) + if err != nil { + return errors.Wrap(err, "failed to scan eager loaded results for uniprot") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "failed to plebian-bind eager loaded slice uniprot") + } + + resultSlice = append(resultSlice, one) + localJoinCols = append(localJoinCols, localJoinCol) + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on uniprot") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager 
loaded relations for uniprot") + } + + if len(uniprotAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.Uniprots = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &uniprotR{} + } + foreign.R.Reactions = append(foreign.R.Reactions, object) + } + return nil + } + + for i, foreign := range resultSlice { + localJoinCol := localJoinCols[i] + for _, local := range slice { + if queries.Equal(local.Accession, localJoinCol) { + local.R.Uniprots = append(local.R.Uniprots, foreign) + if foreign.R == nil { + foreign.R = &uniprotR{} + } + foreign.R.Reactions = append(foreign.R.Reactions, local) + break + } + } + } + + return nil +} + +// AddReactionsideReactions adds the given related objects to the existing relationships +// of the reaction, optionally inserting them as new records. +// Appends related to o.R.ReactionsideReactions. +// Sets related.R.ReactionsideReactionReaction appropriately. 
+func (o *Reaction) AddReactionsideReactions(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ReactionsideReaction) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.Reaction, o.Accession) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"reactionside_reaction\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"reaction"}), + strmangle.WhereClause("\"", "\"", 0, reactionsideReactionPrimaryKeyColumns), + ) + values := []interface{}{o.Accession, rel.Reaction, rel.Reactionside} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.Reaction, o.Accession) + } + } + + if o.R == nil { + o.R = &reactionR{ + ReactionsideReactions: related, + } + } else { + o.R.ReactionsideReactions = append(o.R.ReactionsideReactions, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &reactionsideReactionR{ + ReactionsideReactionReaction: o, + } + } else { + rel.R.ReactionsideReactionReaction = o + } + } + return nil +} + +// AddUniprots adds the given related objects to the existing relationships +// of the reaction, optionally inserting them as new records. +// Appends related to o.R.Uniprots. +// Sets related.R.Reactions appropriately. 
+func (o *Reaction) AddUniprots(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Uniprot) error { + var err error + for _, rel := range related { + if insert { + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + } + + for _, rel := range related { + query := "insert into \"uniprot_to_reaction\" (\"reaction\", \"uniprot\") values (?, ?)" + values := []interface{}{o.Accession, rel.Accession} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err = exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to insert into join table") + } + } + if o.R == nil { + o.R = &reactionR{ + Uniprots: related, + } + } else { + o.R.Uniprots = append(o.R.Uniprots, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &uniprotR{ + Reactions: ReactionSlice{o}, + } + } else { + rel.R.Reactions = append(rel.R.Reactions, o) + } + } + return nil +} + +// SetUniprots removes all previously related items of the +// reaction replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.Reactions's Uniprots accordingly. +// Replaces o.R.Uniprots with related. +// Sets related.R.Reactions's Uniprots accordingly. +func (o *Reaction) SetUniprots(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Uniprot) error { + query := "delete from \"uniprot_to_reaction\" where \"reaction\" = ?" + values := []interface{}{o.Accession} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) 
+ if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + removeUniprotsFromReactionsSlice(o, related) + if o.R != nil { + o.R.Uniprots = nil + } + + return o.AddUniprots(ctx, exec, insert, related...) +} + +// RemoveUniprots relationships from objects passed in. +// Removes related items from R.Uniprots (uses pointer comparison, removal does not keep order) +// Sets related.R.Reactions. +func (o *Reaction) RemoveUniprots(ctx context.Context, exec boil.ContextExecutor, related ...*Uniprot) error { + if len(related) == 0 { + return nil + } + + var err error + query := fmt.Sprintf( + "delete from \"uniprot_to_reaction\" where \"reaction\" = ? and \"uniprot\" in (%s)", + strmangle.Placeholders(dialect.UseIndexPlaceholders, len(related), 2, 1), + ) + values := []interface{}{o.Accession} + for _, rel := range related { + values = append(values, rel.Accession) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err = exec.ExecContext(ctx, query, values...) 
+ if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + removeUniprotsFromReactionsSlice(o, related) + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.Uniprots { + if rel != ri { + continue + } + + ln := len(o.R.Uniprots) + if ln > 1 && i < ln-1 { + o.R.Uniprots[i] = o.R.Uniprots[ln-1] + } + o.R.Uniprots = o.R.Uniprots[:ln-1] + break + } + } + + return nil +} + +func removeUniprotsFromReactionsSlice(o *Reaction, related []*Uniprot) { + for _, rel := range related { + if rel.R == nil { + continue + } + for i, ri := range rel.R.Reactions { + if !queries.Equal(o.Accession, ri.Accession) { + continue + } + + ln := len(rel.R.Reactions) + if ln > 1 && i < ln-1 { + rel.R.Reactions[i] = rel.R.Reactions[ln-1] + } + rel.R.Reactions = rel.R.Reactions[:ln-1] + break + } + } +} + +// Reactions retrieves all the records using an executor. +func Reactions(mods ...qm.QueryMod) reactionQuery { + mods = append(mods, qm.From("\"reaction\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"reaction\".*"}) + } + + return reactionQuery{q} +} + +// FindReaction retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindReaction(ctx context.Context, exec boil.ContextExecutor, accession null.String, selectCols ...string) (*Reaction, error) { + reactionObj := &Reaction{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"reaction\" where \"accession\"=?", sel, + ) + + q := queries.Raw(query, accession) + + err := q.Bind(ctx, exec, reactionObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from reaction") + } + + if err = reactionObj.doAfterSelectHooks(ctx, exec); err != nil { + return reactionObj, err + } + + return reactionObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *Reaction) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no reaction provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(reactionColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + reactionInsertCacheMut.RLock() + cache, cached := reactionInsertCache[key] + reactionInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + reactionAllColumns, + reactionColumnsWithDefault, + reactionColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(reactionType, reactionMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(reactionType, reactionMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"reaction\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), 
strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"reaction\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into reaction") + } + + if !cached { + reactionInsertCacheMut.Lock() + reactionInsertCache[key] = cache + reactionInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the Reaction. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *Reaction) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + reactionUpdateCacheMut.RLock() + cache, cached := reactionUpdateCache[key] + reactionUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + reactionAllColumns, + reactionPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update reaction, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"reaction\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, reactionPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(reactionType, reactionMapping, append(wl, reactionPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update reaction row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for reaction") + } + + if !cached { + reactionUpdateCacheMut.Lock() + reactionUpdateCache[key] = cache + reactionUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q reactionQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for reaction") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for reaction") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ReactionSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactionPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"reaction\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactionPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in reaction slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all reaction") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Reaction) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no reaction provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(reactionColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + reactionUpsertCacheMut.RLock() + cache, cached := reactionUpsertCache[key] + reactionUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + reactionAllColumns, + reactionColumnsWithDefault, + reactionColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + reactionAllColumns, + reactionPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + 
return errors.New("models: unable to upsert reaction, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(reactionPrimaryKeyColumns)) + copy(conflict, reactionPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"reaction\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(reactionType, reactionMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(reactionType, reactionMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert reaction") + } + + if !cached { + reactionUpsertCacheMut.Lock() + reactionUpsertCache[key] = cache + reactionUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Reaction record with an executor. +// Delete will match against the primary key column to find the record to delete. 
func (o *Reaction) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no Reaction provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	// The row is matched solely by the primary key ("accession"); `?` is an
	// unnumbered bind placeholder (non-Postgres dialect).
	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), reactionPrimaryKeyMapping)
	sql := "DELETE FROM \"reaction\" WHERE \"accession\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from reaction")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for reaction")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q reactionQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no reactionQuery provided for delete all")
	}

	// Rewrites the underlying SELECT query into a DELETE with the same
	// WHERE clause. Note: query-level DeleteAll does not run per-row hooks.
	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from reaction")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for reaction")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o ReactionSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Per-row before-delete hooks run only if any are registered.
	if len(reactionBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// Collect every row's primary-key values; the WHERE clause below repeats
	// the PK predicate once per slice element (OR-joined), one statement total.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactionPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"reaction\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactionPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from reaction slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for reaction")
	}

	if len(reactionAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *Reaction) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindReaction(ctx, exec, o.Accession)
	if err != nil {
		return err
	}

	// Overwrite the receiver wholesale with the freshly fetched row
	// (relationship struct R is replaced along with everything else).
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *ReactionSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := ReactionSlice{}
	// One SELECT fetches all rows: the WHERE clause repeats the primary-key
	// predicate once per element of the slice (see WhereClauseRepeated).
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactionPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"reaction\".* FROM \"reaction\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactionPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in ReactionSlice")
	}

	// Replace the caller's slice; rows deleted server-side simply vanish,
	// and result order follows the database, not the original slice.
	*o = slice

	return nil
}

// ReactionExists checks if the Reaction row exists.
func ReactionExists(ctx context.Context, exec boil.ContextExecutor, accession null.String) (bool, error) {
	var exists bool
	// SELECT EXISTS(... LIMIT 1) returns a single boolean column; `?` is an
	// unnumbered bind placeholder (non-Postgres dialect).
	sql := "select exists(select 1 from \"reaction\" where \"accession\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, accession)
	}
	row := exec.QueryRowContext(ctx, sql, accession)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if reaction exists")
	}

	return exists, nil
}
diff --git a/models/reaction_participant.go b/models/reaction_participant.go
new file mode 100644
index 0000000..6c3a486
--- /dev/null
+++ b/models/reaction_participant.go
@@ -0,0 +1,1304 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// ReactionParticipant is an object representing the database table. +type ReactionParticipant struct { + Compound null.String `boil:"compound" json:"compound,omitempty" toml:"compound" yaml:"compound,omitempty"` + Reactionside string `boil:"reactionside" json:"reactionside" toml:"reactionside" yaml:"reactionside"` + Contains null.Int64 `boil:"contains" json:"contains,omitempty" toml:"contains" yaml:"contains,omitempty"` + ContainsN string `boil:"contains_n" json:"contains_n" toml:"contains_n" yaml:"contains_n"` + Minus string `boil:"minus" json:"minus" toml:"minus" yaml:"minus"` + Plus string `boil:"plus" json:"plus" toml:"plus" yaml:"plus"` + + R *reactionParticipantR `boil:"-" json:"-" toml:"-" yaml:"-"` + L reactionParticipantL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ReactionParticipantColumns = struct { + Compound string + Reactionside string + Contains string + ContainsN string + Minus string + Plus string +}{ + Compound: "compound", + Reactionside: "reactionside", + Contains: "contains", + ContainsN: "contains_n", + Minus: "minus", + Plus: "plus", +} + +var ReactionParticipantTableColumns = struct { + Compound string + Reactionside string + Contains string + ContainsN string + Minus string + Plus string +}{ + Compound: "reaction_participant.compound", + Reactionside: "reaction_participant.reactionside", + Contains: "reaction_participant.contains", + ContainsN: "reaction_participant.contains_n", + Minus: "reaction_participant.minus", + Plus: "reaction_participant.plus", +} + +// Generated where + +var 
ReactionParticipantWhere = struct { + Compound whereHelpernull_String + Reactionside whereHelperstring + Contains whereHelpernull_Int64 + ContainsN whereHelperstring + Minus whereHelperstring + Plus whereHelperstring +}{ + Compound: whereHelpernull_String{field: "\"reaction_participant\".\"compound\""}, + Reactionside: whereHelperstring{field: "\"reaction_participant\".\"reactionside\""}, + Contains: whereHelpernull_Int64{field: "\"reaction_participant\".\"contains\""}, + ContainsN: whereHelperstring{field: "\"reaction_participant\".\"contains_n\""}, + Minus: whereHelperstring{field: "\"reaction_participant\".\"minus\""}, + Plus: whereHelperstring{field: "\"reaction_participant\".\"plus\""}, +} + +// ReactionParticipantRels is where relationship names are stored. +var ReactionParticipantRels = struct { + ReactionParticipantReactionside string + ReactionParticipantCompound string +}{ + ReactionParticipantReactionside: "ReactionParticipantReactionside", + ReactionParticipantCompound: "ReactionParticipantCompound", +} + +// reactionParticipantR is where relationships are stored. 
+type reactionParticipantR struct { + ReactionParticipantReactionside *Reactionside `boil:"ReactionParticipantReactionside" json:"ReactionParticipantReactionside" toml:"ReactionParticipantReactionside" yaml:"ReactionParticipantReactionside"` + ReactionParticipantCompound *Compound `boil:"ReactionParticipantCompound" json:"ReactionParticipantCompound" toml:"ReactionParticipantCompound" yaml:"ReactionParticipantCompound"` +} + +// NewStruct creates a new relationship struct +func (*reactionParticipantR) NewStruct() *reactionParticipantR { + return &reactionParticipantR{} +} + +func (r *reactionParticipantR) GetReactionParticipantReactionside() *Reactionside { + if r == nil { + return nil + } + return r.ReactionParticipantReactionside +} + +func (r *reactionParticipantR) GetReactionParticipantCompound() *Compound { + if r == nil { + return nil + } + return r.ReactionParticipantCompound +} + +// reactionParticipantL is where Load methods for each relationship are stored. +type reactionParticipantL struct{} + +var ( + reactionParticipantAllColumns = []string{"compound", "reactionside", "contains", "contains_n", "minus", "plus"} + reactionParticipantColumnsWithoutDefault = []string{"reactionside"} + reactionParticipantColumnsWithDefault = []string{"compound", "contains", "contains_n", "minus", "plus"} + reactionParticipantPrimaryKeyColumns = []string{"compound", "reactionside"} + reactionParticipantGeneratedColumns = []string{} +) + +type ( + // ReactionParticipantSlice is an alias for a slice of pointers to ReactionParticipant. + // This should almost always be used instead of []ReactionParticipant. 
+ ReactionParticipantSlice []*ReactionParticipant + // ReactionParticipantHook is the signature for custom ReactionParticipant hook methods + ReactionParticipantHook func(context.Context, boil.ContextExecutor, *ReactionParticipant) error + + reactionParticipantQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + reactionParticipantType = reflect.TypeOf(&ReactionParticipant{}) + reactionParticipantMapping = queries.MakeStructMapping(reactionParticipantType) + reactionParticipantPrimaryKeyMapping, _ = queries.BindMapping(reactionParticipantType, reactionParticipantMapping, reactionParticipantPrimaryKeyColumns) + reactionParticipantInsertCacheMut sync.RWMutex + reactionParticipantInsertCache = make(map[string]insertCache) + reactionParticipantUpdateCacheMut sync.RWMutex + reactionParticipantUpdateCache = make(map[string]updateCache) + reactionParticipantUpsertCacheMut sync.RWMutex + reactionParticipantUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var reactionParticipantAfterSelectHooks []ReactionParticipantHook + +var reactionParticipantBeforeInsertHooks []ReactionParticipantHook +var reactionParticipantAfterInsertHooks []ReactionParticipantHook + +var reactionParticipantBeforeUpdateHooks []ReactionParticipantHook +var reactionParticipantAfterUpdateHooks []ReactionParticipantHook + +var reactionParticipantBeforeDeleteHooks []ReactionParticipantHook +var reactionParticipantAfterDeleteHooks []ReactionParticipantHook + +var reactionParticipantBeforeUpsertHooks []ReactionParticipantHook +var reactionParticipantAfterUpsertHooks []ReactionParticipantHook + +// doAfterSelectHooks executes all "after Select" hooks. 
// The doXxxHooks methods below all follow the same pattern: skip entirely
// when hooks are disabled via the context, otherwise run each registered
// hook in registration order and stop at the first error.

func (o *ReactionParticipant) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionParticipantAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *ReactionParticipant) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionParticipantBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *ReactionParticipant) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionParticipantAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *ReactionParticipant) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionParticipantBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *ReactionParticipant) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionParticipantAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *ReactionParticipant) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionParticipantBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *ReactionParticipant) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionParticipantAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *ReactionParticipant) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionParticipantBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *ReactionParticipant) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionParticipantAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddReactionParticipantHook registers your hook function for all future operations.
// NOTE(review): appends to package-level hook slices without locking —
// presumably hooks are registered once at startup before any queries run;
// confirm against callers.
func AddReactionParticipantHook(hookPoint boil.HookPoint, reactionParticipantHook ReactionParticipantHook) {
	switch hookPoint {
	case boil.AfterSelectHook:
		reactionParticipantAfterSelectHooks = append(reactionParticipantAfterSelectHooks, reactionParticipantHook)
	case boil.BeforeInsertHook:
		reactionParticipantBeforeInsertHooks = append(reactionParticipantBeforeInsertHooks, reactionParticipantHook)
	case boil.AfterInsertHook:
		reactionParticipantAfterInsertHooks = append(reactionParticipantAfterInsertHooks, reactionParticipantHook)
	case boil.BeforeUpdateHook:
		reactionParticipantBeforeUpdateHooks = append(reactionParticipantBeforeUpdateHooks, reactionParticipantHook)
	case boil.AfterUpdateHook:
		reactionParticipantAfterUpdateHooks = append(reactionParticipantAfterUpdateHooks, reactionParticipantHook)
	case boil.BeforeDeleteHook:
		reactionParticipantBeforeDeleteHooks = append(reactionParticipantBeforeDeleteHooks, reactionParticipantHook)
	case boil.AfterDeleteHook:
		reactionParticipantAfterDeleteHooks = append(reactionParticipantAfterDeleteHooks, reactionParticipantHook)
	case boil.BeforeUpsertHook:
		reactionParticipantBeforeUpsertHooks = append(reactionParticipantBeforeUpsertHooks, reactionParticipantHook)
	case boil.AfterUpsertHook:
		reactionParticipantAfterUpsertHooks = append(reactionParticipantAfterUpsertHooks, reactionParticipantHook)
	}
}

// One returns a single reactionParticipant record from the query.
func (q reactionParticipantQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ReactionParticipant, error) {
	o := &ReactionParticipant{}

	// Force LIMIT 1 so at most one row is fetched regardless of query mods.
	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		// Surface sql.ErrNoRows unwrapped so callers can errors.Is on it.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for reaction_participant")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all ReactionParticipant records from the query.
func (q reactionParticipantQuery) All(ctx context.Context, exec boil.ContextExecutor) (ReactionParticipantSlice, error) {
	var o []*ReactionParticipant

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to ReactionParticipant slice")
	}

	// After-select hooks run per row, but only if any are registered.
	if len(reactionParticipantAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all ReactionParticipant records in the query.
func (q reactionParticipantQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Replace the select list with COUNT(*) while keeping the WHERE clause.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count reaction_participant rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q reactionParticipantQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	// COUNT with LIMIT 1: the database can stop after the first match.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if reaction_participant exists")
	}

	return count > 0, nil
}

// ReactionParticipantReactionside pointed to by the foreign key.
func (o *ReactionParticipant) ReactionParticipantReactionside(mods ...qm.QueryMod) reactionsideQuery {
	// Seed the related-table query with the FK match; caller mods are
	// appended after so they can add further filtering/ordering.
	queryMods := []qm.QueryMod{
		qm.Where("\"accession\" = ?", o.Reactionside),
	}

	queryMods = append(queryMods, mods...)

	return Reactionsides(queryMods...)
}

// ReactionParticipantCompound pointed to by the foreign key.
func (o *ReactionParticipant) ReactionParticipantCompound(mods ...qm.QueryMod) compoundQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"accession\" = ?", o.Compound),
	}

	queryMods = append(queryMods, mods...)

	return Compounds(queryMods...)
}

// LoadReactionParticipantReactionside allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
+func (reactionParticipantL) LoadReactionParticipantReactionside(ctx context.Context, e boil.ContextExecutor, singular bool, maybeReactionParticipant interface{}, mods queries.Applicator) error { + var slice []*ReactionParticipant + var object *ReactionParticipant + + if singular { + object = maybeReactionParticipant.(*ReactionParticipant) + } else { + slice = *maybeReactionParticipant.(*[]*ReactionParticipant) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &reactionParticipantR{} + } + if !queries.IsNil(object.Reactionside) { + args = append(args, object.Reactionside) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &reactionParticipantR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Reactionside) { + continue Outer + } + } + + if !queries.IsNil(obj.Reactionside) { + args = append(args, obj.Reactionside) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`reactionside`), + qm.WhereIn(`reactionside.accession in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Reactionside") + } + + var resultSlice []*Reactionside + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Reactionside") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for reactionside") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for reactionside") + } + + if len(reactionParticipantAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + 
object.R.ReactionParticipantReactionside = foreign + if foreign.R == nil { + foreign.R = &reactionsideR{} + } + foreign.R.ReactionParticipants = append(foreign.R.ReactionParticipants, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.Reactionside, foreign.Accession) { + local.R.ReactionParticipantReactionside = foreign + if foreign.R == nil { + foreign.R = &reactionsideR{} + } + foreign.R.ReactionParticipants = append(foreign.R.ReactionParticipants, local) + break + } + } + } + + return nil +} + +// LoadReactionParticipantCompound allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (reactionParticipantL) LoadReactionParticipantCompound(ctx context.Context, e boil.ContextExecutor, singular bool, maybeReactionParticipant interface{}, mods queries.Applicator) error { + var slice []*ReactionParticipant + var object *ReactionParticipant + + if singular { + object = maybeReactionParticipant.(*ReactionParticipant) + } else { + slice = *maybeReactionParticipant.(*[]*ReactionParticipant) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &reactionParticipantR{} + } + if !queries.IsNil(object.Compound) { + args = append(args, object.Compound) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &reactionParticipantR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Compound) { + continue Outer + } + } + + if !queries.IsNil(obj.Compound) { + args = append(args, obj.Compound) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`compound`), + qm.WhereIn(`compound.accession in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Compound") + } + + var resultSlice []*Compound + if err = 
queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Compound") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for compound") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound") + } + + if len(reactionParticipantAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.ReactionParticipantCompound = foreign + if foreign.R == nil { + foreign.R = &compoundR{} + } + foreign.R.ReactionParticipants = append(foreign.R.ReactionParticipants, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.Compound, foreign.Accession) { + local.R.ReactionParticipantCompound = foreign + if foreign.R == nil { + foreign.R = &compoundR{} + } + foreign.R.ReactionParticipants = append(foreign.R.ReactionParticipants, local) + break + } + } + } + + return nil +} + +// SetReactionParticipantReactionside of the reactionParticipant to the related item. +// Sets o.R.ReactionParticipantReactionside to related. +// Adds o to related.R.ReactionParticipants. 
func (o *ReactionParticipant) SetReactionParticipantReactionside(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Reactionside) error {
	var err error
	// Optionally insert the related row first so its accession exists.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Update the FK column on this row, matched by the composite primary
	// key (compound, reactionside).
	updateQuery := fmt.Sprintf(
		"UPDATE \"reaction_participant\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"reactionside"}),
		strmangle.WhereClause("\"", "\"", 0, reactionParticipantPrimaryKeyColumns),
	)
	// Order matches the query: SET value first, then the PK values.
	values := []interface{}{related.Accession, o.Compound, o.Reactionside}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Keep the in-memory struct and both sides' relationship caches in sync.
	queries.Assign(&o.Reactionside, related.Accession)
	if o.R == nil {
		o.R = &reactionParticipantR{
			ReactionParticipantReactionside: related,
		}
	} else {
		o.R.ReactionParticipantReactionside = related
	}

	if related.R == nil {
		related.R = &reactionsideR{
			ReactionParticipants: ReactionParticipantSlice{o},
		}
	} else {
		related.R.ReactionParticipants = append(related.R.ReactionParticipants, o)
	}

	return nil
}

// SetReactionParticipantCompound of the reactionParticipant to the related item.
// Sets o.R.ReactionParticipantCompound to related.
// Adds o to related.R.ReactionParticipants.
func (o *ReactionParticipant) SetReactionParticipantCompound(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Compound) error {
	var err error
	// Optionally insert the related compound first so its accession exists.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Update the FK column, matched by the composite primary key. Note the
	// PK includes "compound" itself, so the WHERE uses the pre-update value.
	updateQuery := fmt.Sprintf(
		"UPDATE \"reaction_participant\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"compound"}),
		strmangle.WhereClause("\"", "\"", 0, reactionParticipantPrimaryKeyColumns),
	)
	values := []interface{}{related.Accession, o.Compound, o.Reactionside}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Keep the in-memory struct and both sides' relationship caches in sync.
	queries.Assign(&o.Compound, related.Accession)
	if o.R == nil {
		o.R = &reactionParticipantR{
			ReactionParticipantCompound: related,
		}
	} else {
		o.R.ReactionParticipantCompound = related
	}

	if related.R == nil {
		related.R = &compoundR{
			ReactionParticipants: ReactionParticipantSlice{o},
		}
	} else {
		related.R.ReactionParticipants = append(related.R.ReactionParticipants, o)
	}

	return nil
}

// RemoveReactionParticipantCompound relationship.
// Sets o.R.ReactionParticipantCompound to nil.
// Removes o from all passed in related items' relationships struct.
+func (o *ReactionParticipant) RemoveReactionParticipantCompound(ctx context.Context, exec boil.ContextExecutor, related *Compound) error { + var err error + + queries.SetScanner(&o.Compound, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("compound")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.ReactionParticipantCompound = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.ReactionParticipants { + if queries.Equal(o.Compound, ri.Compound) { + continue + } + + ln := len(related.R.ReactionParticipants) + if ln > 1 && i < ln-1 { + related.R.ReactionParticipants[i] = related.R.ReactionParticipants[ln-1] + } + related.R.ReactionParticipants = related.R.ReactionParticipants[:ln-1] + break + } + return nil +} + +// ReactionParticipants retrieves all the records using an executor. +func ReactionParticipants(mods ...qm.QueryMod) reactionParticipantQuery { + mods = append(mods, qm.From("\"reaction_participant\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"reaction_participant\".*"}) + } + + return reactionParticipantQuery{q} +} + +// FindReactionParticipant retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindReactionParticipant(ctx context.Context, exec boil.ContextExecutor, compound null.String, reactionside string, selectCols ...string) (*ReactionParticipant, error) { + reactionParticipantObj := &ReactionParticipant{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"reaction_participant\" where \"compound\"=? 
AND \"reactionside\"=?", sel, + ) + + q := queries.Raw(query, compound, reactionside) + + err := q.Bind(ctx, exec, reactionParticipantObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from reaction_participant") + } + + if err = reactionParticipantObj.doAfterSelectHooks(ctx, exec); err != nil { + return reactionParticipantObj, err + } + + return reactionParticipantObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *ReactionParticipant) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no reaction_participant provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(reactionParticipantColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + reactionParticipantInsertCacheMut.RLock() + cache, cached := reactionParticipantInsertCache[key] + reactionParticipantInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + reactionParticipantAllColumns, + reactionParticipantColumnsWithDefault, + reactionParticipantColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(reactionParticipantType, reactionParticipantMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(reactionParticipantType, reactionParticipantMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"reaction_participant\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"reaction_participant\" %sDEFAULT VALUES%s" 
+ } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into reaction_participant") + } + + if !cached { + reactionParticipantInsertCacheMut.Lock() + reactionParticipantInsertCache[key] = cache + reactionParticipantInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ReactionParticipant. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *ReactionParticipant) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + reactionParticipantUpdateCacheMut.RLock() + cache, cached := reactionParticipantUpdateCache[key] + reactionParticipantUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + reactionParticipantAllColumns, + reactionParticipantPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update reaction_participant, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"reaction_participant\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, reactionParticipantPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(reactionParticipantType, reactionParticipantMapping, append(wl, reactionParticipantPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update reaction_participant row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for reaction_participant") + } + + if !cached { + reactionParticipantUpdateCacheMut.Lock() + reactionParticipantUpdateCache[key] = cache + reactionParticipantUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q reactionParticipantQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for reaction_participant") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for reaction_participant") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ReactionParticipantSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactionParticipantPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"reaction_participant\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactionParticipantPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in reactionParticipant slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all reactionParticipant") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *ReactionParticipant) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no reaction_participant provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(reactionParticipantColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key 
:= buf.String() + strmangle.PutBuffer(buf) + + reactionParticipantUpsertCacheMut.RLock() + cache, cached := reactionParticipantUpsertCache[key] + reactionParticipantUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + reactionParticipantAllColumns, + reactionParticipantColumnsWithDefault, + reactionParticipantColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + reactionParticipantAllColumns, + reactionParticipantPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert reaction_participant, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(reactionParticipantPrimaryKeyColumns)) + copy(conflict, reactionParticipantPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"reaction_participant\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(reactionParticipantType, reactionParticipantMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(reactionParticipantType, reactionParticipantMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert reaction_participant") + } + + if !cached { + reactionParticipantUpsertCacheMut.Lock() + reactionParticipantUpsertCache[key] = cache + reactionParticipantUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ReactionParticipant record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *ReactionParticipant) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ReactionParticipant provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), reactionParticipantPrimaryKeyMapping) + sql := "DELETE FROM \"reaction_participant\" WHERE \"compound\"=? AND \"reactionside\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from reaction_participant") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for reaction_participant") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q reactionParticipantQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no reactionParticipantQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from reaction_participant") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for reaction_participant") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o ReactionParticipantSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(reactionParticipantBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactionParticipantPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"reaction_participant\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactionParticipantPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from reactionParticipant slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for reaction_participant") + } + + if len(reactionParticipantAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ReactionParticipant) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindReactionParticipant(ctx, exec, o.Compound, o.Reactionside) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *ReactionParticipantSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ReactionParticipantSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactionParticipantPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"reaction_participant\".* FROM \"reaction_participant\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactionParticipantPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ReactionParticipantSlice") + } + + *o = slice + + return nil +} + +// ReactionParticipantExists checks if the ReactionParticipant row exists. 
+func ReactionParticipantExists(ctx context.Context, exec boil.ContextExecutor, compound null.String, reactionside string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"reaction_participant\" where \"compound\"=? AND \"reactionside\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, compound, reactionside) + } + row := exec.QueryRowContext(ctx, sql, compound, reactionside) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if reaction_participant exists") + } + + return exists, nil +} diff --git a/models/reactionside.go b/models/reactionside.go new file mode 100644 index 0000000..680eac5 --- /dev/null +++ b/models/reactionside.go @@ -0,0 +1,1234 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Reactionside is an object representing the database table. 
+type Reactionside struct { + Accession null.String `boil:"accession" json:"accession,omitempty" toml:"accession" yaml:"accession,omitempty"` + + R *reactionsideR `boil:"-" json:"-" toml:"-" yaml:"-"` + L reactionsideL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ReactionsideColumns = struct { + Accession string +}{ + Accession: "accession", +} + +var ReactionsideTableColumns = struct { + Accession string +}{ + Accession: "reactionside.accession", +} + +// Generated where + +var ReactionsideWhere = struct { + Accession whereHelpernull_String +}{ + Accession: whereHelpernull_String{field: "\"reactionside\".\"accession\""}, +} + +// ReactionsideRels is where relationship names are stored. +var ReactionsideRels = struct { + ReactionParticipants string + ReactionsideReactions string +}{ + ReactionParticipants: "ReactionParticipants", + ReactionsideReactions: "ReactionsideReactions", +} + +// reactionsideR is where relationships are stored. +type reactionsideR struct { + ReactionParticipants ReactionParticipantSlice `boil:"ReactionParticipants" json:"ReactionParticipants" toml:"ReactionParticipants" yaml:"ReactionParticipants"` + ReactionsideReactions ReactionsideReactionSlice `boil:"ReactionsideReactions" json:"ReactionsideReactions" toml:"ReactionsideReactions" yaml:"ReactionsideReactions"` +} + +// NewStruct creates a new relationship struct +func (*reactionsideR) NewStruct() *reactionsideR { + return &reactionsideR{} +} + +func (r *reactionsideR) GetReactionParticipants() ReactionParticipantSlice { + if r == nil { + return nil + } + return r.ReactionParticipants +} + +func (r *reactionsideR) GetReactionsideReactions() ReactionsideReactionSlice { + if r == nil { + return nil + } + return r.ReactionsideReactions +} + +// reactionsideL is where Load methods for each relationship are stored. 
+type reactionsideL struct{} + +var ( + reactionsideAllColumns = []string{"accession"} + reactionsideColumnsWithoutDefault = []string{} + reactionsideColumnsWithDefault = []string{"accession"} + reactionsidePrimaryKeyColumns = []string{"accession"} + reactionsideGeneratedColumns = []string{} +) + +type ( + // ReactionsideSlice is an alias for a slice of pointers to Reactionside. + // This should almost always be used instead of []Reactionside. + ReactionsideSlice []*Reactionside + // ReactionsideHook is the signature for custom Reactionside hook methods + ReactionsideHook func(context.Context, boil.ContextExecutor, *Reactionside) error + + reactionsideQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + reactionsideType = reflect.TypeOf(&Reactionside{}) + reactionsideMapping = queries.MakeStructMapping(reactionsideType) + reactionsidePrimaryKeyMapping, _ = queries.BindMapping(reactionsideType, reactionsideMapping, reactionsidePrimaryKeyColumns) + reactionsideInsertCacheMut sync.RWMutex + reactionsideInsertCache = make(map[string]insertCache) + reactionsideUpdateCacheMut sync.RWMutex + reactionsideUpdateCache = make(map[string]updateCache) + reactionsideUpsertCacheMut sync.RWMutex + reactionsideUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var reactionsideAfterSelectHooks []ReactionsideHook + +var reactionsideBeforeInsertHooks []ReactionsideHook +var reactionsideAfterInsertHooks []ReactionsideHook + +var reactionsideBeforeUpdateHooks []ReactionsideHook +var reactionsideAfterUpdateHooks []ReactionsideHook + +var reactionsideBeforeDeleteHooks []ReactionsideHook +var reactionsideAfterDeleteHooks []ReactionsideHook + +var reactionsideBeforeUpsertHooks []ReactionsideHook +var reactionsideAfterUpsertHooks []ReactionsideHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *Reactionside) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionsideAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *Reactionside) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionsideBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Reactionside) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionsideAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *Reactionside) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionsideBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Reactionside) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionsideAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Reactionside) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionsideBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *Reactionside) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionsideAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Reactionside) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionsideBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *Reactionside) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactionsideAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddReactionsideHook registers your hook function for all future operations. +func AddReactionsideHook(hookPoint boil.HookPoint, reactionsideHook ReactionsideHook) { + switch hookPoint { + case boil.AfterSelectHook: + reactionsideAfterSelectHooks = append(reactionsideAfterSelectHooks, reactionsideHook) + case boil.BeforeInsertHook: + reactionsideBeforeInsertHooks = append(reactionsideBeforeInsertHooks, reactionsideHook) + case boil.AfterInsertHook: + reactionsideAfterInsertHooks = append(reactionsideAfterInsertHooks, reactionsideHook) + case boil.BeforeUpdateHook: + reactionsideBeforeUpdateHooks = append(reactionsideBeforeUpdateHooks, reactionsideHook) + case boil.AfterUpdateHook: + reactionsideAfterUpdateHooks = append(reactionsideAfterUpdateHooks, reactionsideHook) + case boil.BeforeDeleteHook: + reactionsideBeforeDeleteHooks = append(reactionsideBeforeDeleteHooks, reactionsideHook) + case boil.AfterDeleteHook: + reactionsideAfterDeleteHooks = append(reactionsideAfterDeleteHooks, reactionsideHook) + case boil.BeforeUpsertHook: + reactionsideBeforeUpsertHooks = append(reactionsideBeforeUpsertHooks, reactionsideHook) + case boil.AfterUpsertHook: + reactionsideAfterUpsertHooks = append(reactionsideAfterUpsertHooks, reactionsideHook) + } +} + +// One returns a single reactionside record from the query. 
+func (q reactionsideQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Reactionside, error) { + o := &Reactionside{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for reactionside") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Reactionside records from the query. +func (q reactionsideQuery) All(ctx context.Context, exec boil.ContextExecutor) (ReactionsideSlice, error) { + var o []*Reactionside + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to Reactionside slice") + } + + if len(reactionsideAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Reactionside records in the query. +func (q reactionsideQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count reactionside rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q reactionsideQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if reactionside exists") + } + + return count > 0, nil +} + +// ReactionParticipants retrieves all the reaction_participant's ReactionParticipants with an executor. 
// ReactionParticipants retrieves a query over all reaction_participant rows
// whose "reactionside" foreign key matches this Reactionside's Accession.
// Additional query mods are applied after the FK filter.
func (o *Reactionside) ReactionParticipants(mods ...qm.QueryMod) reactionParticipantQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"reaction_participant\".\"reactionside\"=?", o.Accession),
	)

	return ReactionParticipants(queryMods...)
}

// ReactionsideReactions retrieves all the reactionside_reaction's ReactionsideReactions with an executor.
// The returned query is filtered on the "reactionside" foreign key column.
func (o *Reactionside) ReactionsideReactions(mods ...qm.QueryMod) reactionsideReactionQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"reactionside_reaction\".\"reactionside\"=?", o.Accession),
	)

	return ReactionsideReactions(queryMods...)
}

// LoadReactionParticipants allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
// maybeReactionside is either a *Reactionside (singular) or a *[]*Reactionside.
func (reactionsideL) LoadReactionParticipants(ctx context.Context, e boil.ContextExecutor, singular bool, maybeReactionside interface{}, mods queries.Applicator) error {
	var slice []*Reactionside
	var object *Reactionside

	if singular {
		object = maybeReactionside.(*Reactionside)
	} else {
		slice = *maybeReactionside.(*[]*Reactionside)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &reactionsideR{}
		}
		args = append(args, object.Accession)
	} else {
		// Deduplicate accessions so the IN clause contains each key only once.
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &reactionsideR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.Accession) {
					continue Outer
				}
			}

			args = append(args, obj.Accession)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`reaction_participant`),
		qm.WhereIn(`reaction_participant.reactionside in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load reaction_participant")
	}

	var resultSlice []*ReactionParticipant
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice reaction_participant")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on reaction_participant")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for reaction_participant")
	}

	if len(reactionParticipantAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.ReactionParticipants = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &reactionParticipantR{}
			}
			foreign.R.ReactionParticipantReactionside = object
		}
		return nil
	}

	// Plural case: wire each fetched child to its matching parent, both ways.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.Accession, foreign.Reactionside) {
				local.R.ReactionParticipants = append(local.R.ReactionParticipants, foreign)
				if foreign.R == nil {
					foreign.R = &reactionParticipantR{}
				}
				foreign.R.ReactionParticipantReactionside = local
				break
			}
		}
	}

	return nil
}

// LoadReactionsideReactions allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
// Mirrors LoadReactionParticipants for the reactionside_reaction join table.
func (reactionsideL) LoadReactionsideReactions(ctx context.Context, e boil.ContextExecutor, singular bool, maybeReactionside interface{}, mods queries.Applicator) error {
	var slice []*Reactionside
	var object *Reactionside

	if singular {
		object = maybeReactionside.(*Reactionside)
	} else {
		slice = *maybeReactionside.(*[]*Reactionside)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &reactionsideR{}
		}
		args = append(args, object.Accession)
	} else {
		// Deduplicate accessions before building the IN clause.
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &reactionsideR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.Accession) {
					continue Outer
				}
			}

			args = append(args, obj.Accession)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`reactionside_reaction`),
		qm.WhereIn(`reactionside_reaction.reactionside in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load reactionside_reaction")
	}

	var resultSlice []*ReactionsideReaction
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice reactionside_reaction")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on reactionside_reaction")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for reactionside_reaction")
	}

	if len(reactionsideReactionAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.ReactionsideReactions = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &reactionsideReactionR{}
			}
			foreign.R.ReactionsideReactionReactionside = object
		}
		return nil
	}

	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.Accession, foreign.Reactionside) {
				local.R.ReactionsideReactions = append(local.R.ReactionsideReactions, foreign)
				if foreign.R == nil {
					foreign.R = &reactionsideReactionR{}
				}
				foreign.R.ReactionsideReactionReactionside = local
				break
			}
		}
	}

	return nil
}

// AddReactionParticipants adds the given related objects to the existing relationships
// of the reactionside, optionally inserting them as new records.
// Appends related to o.R.ReactionParticipants.
// Sets related.R.ReactionParticipantReactionside appropriately.
func (o *Reactionside) AddReactionParticipants(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ReactionParticipant) error {
	var err error
	for _, rel := range related {
		if insert {
			// New record: stamp the FK before inserting.
			queries.Assign(&rel.Reactionside, o.Accession)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing record: repoint its FK via UPDATE keyed on the
			// composite primary key (compound, reactionside).
			updateQuery := fmt.Sprintf(
				"UPDATE \"reaction_participant\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"reactionside"}),
				strmangle.WhereClause("\"", "\"", 0, reactionParticipantPrimaryKeyColumns),
			)
			values := []interface{}{o.Accession, rel.Compound, rel.Reactionside}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.Reactionside, o.Accession)
		}
	}

	if o.R == nil {
		o.R = &reactionsideR{
			ReactionParticipants: related,
		}
	} else {
		o.R.ReactionParticipants = append(o.R.ReactionParticipants, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &reactionParticipantR{
				ReactionParticipantReactionside: o,
			}
		} else {
			rel.R.ReactionParticipantReactionside = o
		}
	}
	return nil
}

// AddReactionsideReactions adds the given related objects to the existing relationships
// of the reactionside, optionally inserting them as new records.
// Appends related to o.R.ReactionsideReactions.
// Sets related.R.ReactionsideReactionReactionside appropriately.
func (o *Reactionside) AddReactionsideReactions(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ReactionsideReaction) error {
	var err error
	for _, rel := range related {
		if insert {
			queries.Assign(&rel.Reactionside, o.Accession)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing record: repoint its FK, keyed on (reaction, reactionside).
			updateQuery := fmt.Sprintf(
				"UPDATE \"reactionside_reaction\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"reactionside"}),
				strmangle.WhereClause("\"", "\"", 0, reactionsideReactionPrimaryKeyColumns),
			)
			values := []interface{}{o.Accession, rel.Reaction, rel.Reactionside}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.Reactionside, o.Accession)
		}
	}

	if o.R == nil {
		o.R = &reactionsideR{
			ReactionsideReactions: related,
		}
	} else {
		o.R.ReactionsideReactions = append(o.R.ReactionsideReactions, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &reactionsideReactionR{
				ReactionsideReactionReactionside: o,
			}
		} else {
			rel.R.ReactionsideReactionReactionside = o
		}
	}
	return nil
}

// Reactionsides retrieves all the records using an executor.
+func Reactionsides(mods ...qm.QueryMod) reactionsideQuery { + mods = append(mods, qm.From("\"reactionside\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"reactionside\".*"}) + } + + return reactionsideQuery{q} +} + +// FindReactionside retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindReactionside(ctx context.Context, exec boil.ContextExecutor, accession null.String, selectCols ...string) (*Reactionside, error) { + reactionsideObj := &Reactionside{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"reactionside\" where \"accession\"=?", sel, + ) + + q := queries.Raw(query, accession) + + err := q.Bind(ctx, exec, reactionsideObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from reactionside") + } + + if err = reactionsideObj.doAfterSelectHooks(ctx, exec); err != nil { + return reactionsideObj, err + } + + return reactionsideObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
// Insert writes o to the "reactionside" table. The generated INSERT statement
// (and its value/return mappings) is memoized in reactionsideInsertCache,
// keyed by the column set and non-zero defaults.
func (o *Reactionside) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no reactionside provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(reactionsideColumnsWithDefault, o)

	key := makeCacheKey(columns, nzDefaults)
	reactionsideInsertCacheMut.RLock()
	cache, cached := reactionsideInsertCache[key]
	reactionsideInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			reactionsideAllColumns,
			reactionsideColumnsWithDefault,
			reactionsideColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(reactionsideType, reactionsideMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(reactionsideType, reactionsideMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"reactionside\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"reactionside\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		// Fill the %s placeholders left open above (output clause / RETURNING).
		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	if len(cache.retMapping) != 0 {
		// RETURNING columns are scanned straight back into o.
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into reactionside")
	}

	if !cached {
		reactionsideInsertCacheMut.Lock()
		reactionsideInsertCache[key] = cache
		reactionsideInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the Reactionside.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *Reactionside) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	reactionsideUpdateCacheMut.RLock()
	cache, cached := reactionsideUpdateCache[key]
	reactionsideUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			reactionsideAllColumns,
			reactionsidePrimaryKeyColumns,
		)

		if !columns.IsWhitelist() {
			// created_at is never auto-updated unless explicitly whitelisted.
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update reactionside, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"reactionside\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, reactionsidePrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(reactionsideType, reactionsideMapping, append(wl, reactionsidePrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update reactionside row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for reactionside")
	}

	if !cached {
		reactionsideUpdateCacheMut.Lock()
		reactionsideUpdateCache[key] = cache
		reactionsideUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q reactionsideQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for reactionside")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for reactionside")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
// Note: update/delete hooks are NOT run for slice UpdateAll.
func (o ReactionsideSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactionsidePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"reactionside\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactionsidePrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in reactionside slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all reactionside")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *Reactionside) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no reactionside provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(reactionsideColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	reactionsideUpsertCacheMut.RLock()
	cache, cached := reactionsideUpsertCache[key]
	reactionsideUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			reactionsideAllColumns,
			reactionsideColumnsWithDefault,
			reactionsideColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			reactionsideAllColumns,
			reactionsidePrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert reactionside, could not build update column list")
		}

		// Conflict target defaults to the primary key columns.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(reactionsidePrimaryKeyColumns))
			copy(conflict, reactionsidePrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"reactionside\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(reactionsideType, reactionsideMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(reactionsideType, reactionsideMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert reactionside")
	}

	if !cached {
		reactionsideUpsertCacheMut.Lock()
		reactionsideUpsertCache[key] = cache
		reactionsideUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single Reactionside record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *Reactionside) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no Reactionside provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), reactionsidePrimaryKeyMapping)
	sql := "DELETE FROM \"reactionside\" WHERE \"accession\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from reactionside")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for reactionside")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
// DeleteAll deletes every row matched by the query. No per-row hooks are run.
func (q reactionsideQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no reactionsideQuery provided for delete all")
	}

	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from reactionside")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for reactionside")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
// Before/after delete hooks run once per element when registered.
func (o ReactionsideSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	if len(reactionsideBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactionsidePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"reactionside\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactionsidePrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from reactionside slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for reactionside")
	}

	if len(reactionsideAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *Reactionside) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindReactionside(ctx, exec, o.Accession)
	if err != nil {
		return err
	}

	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *ReactionsideSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := ReactionsideSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactionsidePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"reactionside\".* FROM \"reactionside\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactionsidePrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in ReactionsideSlice")
	}

	*o = slice

	return nil
}

// ReactionsideExists checks if the Reactionside row exists.
func ReactionsideExists(ctx context.Context, exec boil.ContextExecutor, accession null.String) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"reactionside\" where \"accession\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, accession)
	}
	row := exec.QueryRowContext(ctx, sql, accession)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if reactionside exists")
	}

	return exists, nil
}
diff --git a/models/reactionside_reaction.go b/models/reactionside_reaction.go
new file mode 100644
index 0000000..e2a3f49
--- /dev/null
+++ b/models/reactionside_reaction.go
@@ -0,0 +1,1249 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// ReactionsideReaction is an object representing the database table.
+type ReactionsideReaction struct { + Reaction string `boil:"reaction" json:"reaction" toml:"reaction" yaml:"reaction"` + Reactionside string `boil:"reactionside" json:"reactionside" toml:"reactionside" yaml:"reactionside"` + ReactionsideReactionType string `boil:"reactionside_reaction_type" json:"reactionside_reaction_type" toml:"reactionside_reaction_type" yaml:"reactionside_reaction_type"` + + R *reactionsideReactionR `boil:"-" json:"-" toml:"-" yaml:"-"` + L reactionsideReactionL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ReactionsideReactionColumns = struct { + Reaction string + Reactionside string + ReactionsideReactionType string +}{ + Reaction: "reaction", + Reactionside: "reactionside", + ReactionsideReactionType: "reactionside_reaction_type", +} + +var ReactionsideReactionTableColumns = struct { + Reaction string + Reactionside string + ReactionsideReactionType string +}{ + Reaction: "reactionside_reaction.reaction", + Reactionside: "reactionside_reaction.reactionside", + ReactionsideReactionType: "reactionside_reaction.reactionside_reaction_type", +} + +// Generated where + +var ReactionsideReactionWhere = struct { + Reaction whereHelperstring + Reactionside whereHelperstring + ReactionsideReactionType whereHelperstring +}{ + Reaction: whereHelperstring{field: "\"reactionside_reaction\".\"reaction\""}, + Reactionside: whereHelperstring{field: "\"reactionside_reaction\".\"reactionside\""}, + ReactionsideReactionType: whereHelperstring{field: "\"reactionside_reaction\".\"reactionside_reaction_type\""}, +} + +// ReactionsideReactionRels is where relationship names are stored. +var ReactionsideReactionRels = struct { + ReactionsideReactionReactionside string + ReactionsideReactionReaction string +}{ + ReactionsideReactionReactionside: "ReactionsideReactionReactionside", + ReactionsideReactionReaction: "ReactionsideReactionReaction", +} + +// reactionsideReactionR is where relationships are stored. 
type reactionsideReactionR struct {
	ReactionsideReactionReactionside *Reactionside `boil:"ReactionsideReactionReactionside" json:"ReactionsideReactionReactionside" toml:"ReactionsideReactionReactionside" yaml:"ReactionsideReactionReactionside"`
	ReactionsideReactionReaction     *Reaction     `boil:"ReactionsideReactionReaction" json:"ReactionsideReactionReaction" toml:"ReactionsideReactionReaction" yaml:"ReactionsideReactionReaction"`
}

// NewStruct creates a new relationship struct
func (*reactionsideReactionR) NewStruct() *reactionsideReactionR {
	return &reactionsideReactionR{}
}

// GetReactionsideReactionReactionside is a nil-safe accessor for the loaded parent Reactionside.
func (r *reactionsideReactionR) GetReactionsideReactionReactionside() *Reactionside {
	if r == nil {
		return nil
	}
	return r.ReactionsideReactionReactionside
}

// GetReactionsideReactionReaction is a nil-safe accessor for the loaded parent Reaction.
func (r *reactionsideReactionR) GetReactionsideReactionReaction() *Reaction {
	if r == nil {
		return nil
	}
	return r.ReactionsideReactionReaction
}

// reactionsideReactionL is where Load methods for each relationship are stored.
type reactionsideReactionL struct{}

var (
	reactionsideReactionAllColumns            = []string{"reaction", "reactionside", "reactionside_reaction_type"}
	reactionsideReactionColumnsWithoutDefault = []string{"reaction", "reactionside", "reactionside_reaction_type"}
	reactionsideReactionColumnsWithDefault    = []string{}
	// Composite primary key: (reaction, reactionside).
	reactionsideReactionPrimaryKeyColumns = []string{"reaction", "reactionside"}
	reactionsideReactionGeneratedColumns  = []string{}
)

type (
	// ReactionsideReactionSlice is an alias for a slice of pointers to ReactionsideReaction.
	// This should almost always be used instead of []ReactionsideReaction.
	ReactionsideReactionSlice []*ReactionsideReaction
	// ReactionsideReactionHook is the signature for custom ReactionsideReaction hook methods
	ReactionsideReactionHook func(context.Context, boil.ContextExecutor, *ReactionsideReaction) error

	reactionsideReactionQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	reactionsideReactionType                 = reflect.TypeOf(&ReactionsideReaction{})
	reactionsideReactionMapping              = queries.MakeStructMapping(reactionsideReactionType)
	reactionsideReactionPrimaryKeyMapping, _ = queries.BindMapping(reactionsideReactionType, reactionsideReactionMapping, reactionsideReactionPrimaryKeyColumns)
	reactionsideReactionInsertCacheMut       sync.RWMutex
	reactionsideReactionInsertCache          = make(map[string]insertCache)
	reactionsideReactionUpdateCacheMut       sync.RWMutex
	reactionsideReactionUpdateCache          = make(map[string]updateCache)
	reactionsideReactionUpsertCacheMut       sync.RWMutex
	reactionsideReactionUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Package-level hook registries, appended to by AddReactionsideReactionHook.
var reactionsideReactionAfterSelectHooks []ReactionsideReactionHook

var reactionsideReactionBeforeInsertHooks []ReactionsideReactionHook
var reactionsideReactionAfterInsertHooks []ReactionsideReactionHook

var reactionsideReactionBeforeUpdateHooks []ReactionsideReactionHook
var reactionsideReactionAfterUpdateHooks []ReactionsideReactionHook

var reactionsideReactionBeforeDeleteHooks []ReactionsideReactionHook
var reactionsideReactionAfterDeleteHooks []ReactionsideReactionHook

var reactionsideReactionBeforeUpsertHooks []ReactionsideReactionHook
var reactionsideReactionAfterUpsertHooks []ReactionsideReactionHook

// doAfterSelectHooks executes all "after Select" hooks.
// Each do*Hooks method below runs every registered hook of its kind in
// registration order, stopping at the first error. All of them are skipped
// entirely when hooks are disabled on the context via boil.SkipHooks.
func (o *ReactionsideReaction) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionsideReactionAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *ReactionsideReaction) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionsideReactionBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *ReactionsideReaction) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionsideReactionAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *ReactionsideReaction) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionsideReactionBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *ReactionsideReaction) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionsideReactionAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *ReactionsideReaction) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionsideReactionBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *ReactionsideReaction) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionsideReactionAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *ReactionsideReaction) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionsideReactionBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *ReactionsideReaction) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range reactionsideReactionAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddReactionsideReactionHook registers your hook function for all future operations.
+func AddReactionsideReactionHook(hookPoint boil.HookPoint, reactionsideReactionHook ReactionsideReactionHook) { + switch hookPoint { + case boil.AfterSelectHook: + reactionsideReactionAfterSelectHooks = append(reactionsideReactionAfterSelectHooks, reactionsideReactionHook) + case boil.BeforeInsertHook: + reactionsideReactionBeforeInsertHooks = append(reactionsideReactionBeforeInsertHooks, reactionsideReactionHook) + case boil.AfterInsertHook: + reactionsideReactionAfterInsertHooks = append(reactionsideReactionAfterInsertHooks, reactionsideReactionHook) + case boil.BeforeUpdateHook: + reactionsideReactionBeforeUpdateHooks = append(reactionsideReactionBeforeUpdateHooks, reactionsideReactionHook) + case boil.AfterUpdateHook: + reactionsideReactionAfterUpdateHooks = append(reactionsideReactionAfterUpdateHooks, reactionsideReactionHook) + case boil.BeforeDeleteHook: + reactionsideReactionBeforeDeleteHooks = append(reactionsideReactionBeforeDeleteHooks, reactionsideReactionHook) + case boil.AfterDeleteHook: + reactionsideReactionAfterDeleteHooks = append(reactionsideReactionAfterDeleteHooks, reactionsideReactionHook) + case boil.BeforeUpsertHook: + reactionsideReactionBeforeUpsertHooks = append(reactionsideReactionBeforeUpsertHooks, reactionsideReactionHook) + case boil.AfterUpsertHook: + reactionsideReactionAfterUpsertHooks = append(reactionsideReactionAfterUpsertHooks, reactionsideReactionHook) + } +} + +// One returns a single reactionsideReaction record from the query. 
+func (q reactionsideReactionQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ReactionsideReaction, error) { + o := &ReactionsideReaction{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for reactionside_reaction") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ReactionsideReaction records from the query. +func (q reactionsideReactionQuery) All(ctx context.Context, exec boil.ContextExecutor) (ReactionsideReactionSlice, error) { + var o []*ReactionsideReaction + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ReactionsideReaction slice") + } + + if len(reactionsideReactionAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ReactionsideReaction records in the query. +func (q reactionsideReactionQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count reactionside_reaction rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. 
+func (q reactionsideReactionQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if reactionside_reaction exists") + } + + return count > 0, nil +} + +// ReactionsideReactionReactionside pointed to by the foreign key. +func (o *ReactionsideReaction) ReactionsideReactionReactionside(mods ...qm.QueryMod) reactionsideQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"accession\" = ?", o.Reactionside), + } + + queryMods = append(queryMods, mods...) + + return Reactionsides(queryMods...) +} + +// ReactionsideReactionReaction pointed to by the foreign key. +func (o *ReactionsideReaction) ReactionsideReactionReaction(mods ...qm.QueryMod) reactionQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"accession\" = ?", o.Reaction), + } + + queryMods = append(queryMods, mods...) + + return Reactions(queryMods...) +} + +// LoadReactionsideReactionReactionside allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (reactionsideReactionL) LoadReactionsideReactionReactionside(ctx context.Context, e boil.ContextExecutor, singular bool, maybeReactionsideReaction interface{}, mods queries.Applicator) error { + var slice []*ReactionsideReaction + var object *ReactionsideReaction + + if singular { + object = maybeReactionsideReaction.(*ReactionsideReaction) + } else { + slice = *maybeReactionsideReaction.(*[]*ReactionsideReaction) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &reactionsideReactionR{} + } + if !queries.IsNil(object.Reactionside) { + args = append(args, object.Reactionside) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &reactionsideReactionR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Reactionside) { + continue Outer + } + } + + if !queries.IsNil(obj.Reactionside) { + args = append(args, obj.Reactionside) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`reactionside`), + qm.WhereIn(`reactionside.accession in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Reactionside") + } + + var resultSlice []*Reactionside + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Reactionside") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for reactionside") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for reactionside") + } + + if len(reactionsideReactionAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + 
object.R.ReactionsideReactionReactionside = foreign + if foreign.R == nil { + foreign.R = &reactionsideR{} + } + foreign.R.ReactionsideReactions = append(foreign.R.ReactionsideReactions, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.Reactionside, foreign.Accession) { + local.R.ReactionsideReactionReactionside = foreign + if foreign.R == nil { + foreign.R = &reactionsideR{} + } + foreign.R.ReactionsideReactions = append(foreign.R.ReactionsideReactions, local) + break + } + } + } + + return nil +} + +// LoadReactionsideReactionReaction allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (reactionsideReactionL) LoadReactionsideReactionReaction(ctx context.Context, e boil.ContextExecutor, singular bool, maybeReactionsideReaction interface{}, mods queries.Applicator) error { + var slice []*ReactionsideReaction + var object *ReactionsideReaction + + if singular { + object = maybeReactionsideReaction.(*ReactionsideReaction) + } else { + slice = *maybeReactionsideReaction.(*[]*ReactionsideReaction) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &reactionsideReactionR{} + } + if !queries.IsNil(object.Reaction) { + args = append(args, object.Reaction) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &reactionsideReactionR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Reaction) { + continue Outer + } + } + + if !queries.IsNil(obj.Reaction) { + args = append(args, obj.Reaction) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`reaction`), + qm.WhereIn(`reaction.accession in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Reaction") + } + + var resultSlice []*Reaction + 
if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Reaction") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for reaction") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for reaction") + } + + if len(reactionsideReactionAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.ReactionsideReactionReaction = foreign + if foreign.R == nil { + foreign.R = &reactionR{} + } + foreign.R.ReactionsideReactions = append(foreign.R.ReactionsideReactions, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.Reaction, foreign.Accession) { + local.R.ReactionsideReactionReaction = foreign + if foreign.R == nil { + foreign.R = &reactionR{} + } + foreign.R.ReactionsideReactions = append(foreign.R.ReactionsideReactions, local) + break + } + } + } + + return nil +} + +// SetReactionsideReactionReactionside of the reactionsideReaction to the related item. +// Sets o.R.ReactionsideReactionReactionside to related. +// Adds o to related.R.ReactionsideReactions. 
+func (o *ReactionsideReaction) SetReactionsideReactionReactionside(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Reactionside) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"reactionside_reaction\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"reactionside"}), + strmangle.WhereClause("\"", "\"", 0, reactionsideReactionPrimaryKeyColumns), + ) + values := []interface{}{related.Accession, o.Reaction, o.Reactionside} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.Reactionside, related.Accession) + if o.R == nil { + o.R = &reactionsideReactionR{ + ReactionsideReactionReactionside: related, + } + } else { + o.R.ReactionsideReactionReactionside = related + } + + if related.R == nil { + related.R = &reactionsideR{ + ReactionsideReactions: ReactionsideReactionSlice{o}, + } + } else { + related.R.ReactionsideReactions = append(related.R.ReactionsideReactions, o) + } + + return nil +} + +// SetReactionsideReactionReaction of the reactionsideReaction to the related item. +// Sets o.R.ReactionsideReactionReaction to related. +// Adds o to related.R.ReactionsideReactions. 
+func (o *ReactionsideReaction) SetReactionsideReactionReaction(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Reaction) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"reactionside_reaction\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"reaction"}), + strmangle.WhereClause("\"", "\"", 0, reactionsideReactionPrimaryKeyColumns), + ) + values := []interface{}{related.Accession, o.Reaction, o.Reactionside} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.Reaction, related.Accession) + if o.R == nil { + o.R = &reactionsideReactionR{ + ReactionsideReactionReaction: related, + } + } else { + o.R.ReactionsideReactionReaction = related + } + + if related.R == nil { + related.R = &reactionR{ + ReactionsideReactions: ReactionsideReactionSlice{o}, + } + } else { + related.R.ReactionsideReactions = append(related.R.ReactionsideReactions, o) + } + + return nil +} + +// ReactionsideReactions retrieves all the records using an executor. +func ReactionsideReactions(mods ...qm.QueryMod) reactionsideReactionQuery { + mods = append(mods, qm.From("\"reactionside_reaction\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"reactionside_reaction\".*"}) + } + + return reactionsideReactionQuery{q} +} + +// FindReactionsideReaction retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindReactionsideReaction(ctx context.Context, exec boil.ContextExecutor, reaction string, reactionside string, selectCols ...string) (*ReactionsideReaction, error) { + reactionsideReactionObj := &ReactionsideReaction{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"reactionside_reaction\" where \"reaction\"=? AND \"reactionside\"=?", sel, + ) + + q := queries.Raw(query, reaction, reactionside) + + err := q.Bind(ctx, exec, reactionsideReactionObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from reactionside_reaction") + } + + if err = reactionsideReactionObj.doAfterSelectHooks(ctx, exec); err != nil { + return reactionsideReactionObj, err + } + + return reactionsideReactionObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *ReactionsideReaction) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no reactionside_reaction provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(reactionsideReactionColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + reactionsideReactionInsertCacheMut.RLock() + cache, cached := reactionsideReactionInsertCache[key] + reactionsideReactionInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + reactionsideReactionAllColumns, + reactionsideReactionColumnsWithDefault, + reactionsideReactionColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(reactionsideReactionType, reactionsideReactionMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(reactionsideReactionType, reactionsideReactionMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"reactionside_reaction\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"reactionside_reaction\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, 
vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into reactionside_reaction") + } + + if !cached { + reactionsideReactionInsertCacheMut.Lock() + reactionsideReactionInsertCache[key] = cache + reactionsideReactionInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ReactionsideReaction. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *ReactionsideReaction) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + reactionsideReactionUpdateCacheMut.RLock() + cache, cached := reactionsideReactionUpdateCache[key] + reactionsideReactionUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + reactionsideReactionAllColumns, + reactionsideReactionPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update reactionside_reaction, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"reactionside_reaction\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, reactionsideReactionPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(reactionsideReactionType, reactionsideReactionMapping, append(wl, reactionsideReactionPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if 
boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update reactionside_reaction row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for reactionside_reaction") + } + + if !cached { + reactionsideReactionUpdateCacheMut.Lock() + reactionsideReactionUpdateCache[key] = cache + reactionsideReactionUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q reactionsideReactionQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for reactionside_reaction") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for reactionside_reaction") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. 
+func (o ReactionsideReactionSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactionsideReactionPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"reactionside_reaction\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactionsideReactionPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in reactionsideReaction slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all reactionsideReaction") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. 
+func (o *ReactionsideReaction) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no reactionside_reaction provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(reactionsideReactionColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + reactionsideReactionUpsertCacheMut.RLock() + cache, cached := reactionsideReactionUpsertCache[key] + reactionsideReactionUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + reactionsideReactionAllColumns, + reactionsideReactionColumnsWithDefault, + reactionsideReactionColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + reactionsideReactionAllColumns, + reactionsideReactionPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert reactionside_reaction, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(reactionsideReactionPrimaryKeyColumns)) + copy(conflict, reactionsideReactionPrimaryKeyColumns) + } + cache.query = 
buildUpsertQuerySQLite(dialect, "\"reactionside_reaction\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(reactionsideReactionType, reactionsideReactionMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(reactionsideReactionType, reactionsideReactionMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert reactionside_reaction") + } + + if !cached { + reactionsideReactionUpsertCacheMut.Lock() + reactionsideReactionUpsertCache[key] = cache + reactionsideReactionUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ReactionsideReaction record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *ReactionsideReaction) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ReactionsideReaction provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), reactionsideReactionPrimaryKeyMapping) + sql := "DELETE FROM \"reactionside_reaction\" WHERE \"reaction\"=? AND \"reactionside\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from reactionside_reaction") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for reactionside_reaction") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q reactionsideReactionQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no reactionsideReactionQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from reactionside_reaction") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for reactionside_reaction") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o ReactionsideReactionSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(reactionsideReactionBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactionsideReactionPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"reactionside_reaction\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactionsideReactionPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from reactionsideReaction slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for reactionside_reaction") + } + + if len(reactionsideReactionAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ReactionsideReaction) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindReactionsideReaction(ctx, exec, o.Reaction, o.Reactionside) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *ReactionsideReactionSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ReactionsideReactionSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactionsideReactionPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"reactionside_reaction\".* FROM \"reactionside_reaction\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactionsideReactionPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ReactionsideReactionSlice") + } + + *o = slice + + return nil +} + +// ReactionsideReactionExists checks if the ReactionsideReaction row exists. +func ReactionsideReactionExists(ctx context.Context, exec boil.ContextExecutor, reaction string, reactionside string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"reactionside_reaction\" where \"reaction\"=? AND \"reactionside\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, reaction, reactionside) + } + row := exec.QueryRowContext(ctx, sql, reaction, reactionside) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if reactionside_reaction exists") + } + + return exists, nil +} diff --git a/models/reactive_part.go b/models/reactive_part.go new file mode 100644 index 0000000..3d7a6b1 --- /dev/null +++ b/models/reactive_part.go @@ -0,0 +1,1088 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// ReactivePart is an object representing the database table. +type ReactivePart struct { + ID null.Int64 `boil:"id" json:"id,omitempty" toml:"id" yaml:"id,omitempty"` + Accession null.String `boil:"accession" json:"accession,omitempty" toml:"accession" yaml:"accession,omitempty"` + Name null.String `boil:"name" json:"name,omitempty" toml:"name" yaml:"name,omitempty"` + HTMLName null.String `boil:"html_name" json:"html_name,omitempty" toml:"html_name" yaml:"html_name,omitempty"` + Compound string `boil:"compound" json:"compound" toml:"compound" yaml:"compound"` + + R *reactivePartR `boil:"-" json:"-" toml:"-" yaml:"-"` + L reactivePartL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ReactivePartColumns = struct { + ID string + Accession string + Name string + HTMLName string + Compound string +}{ + ID: "id", + Accession: "accession", + Name: "name", + HTMLName: "html_name", + Compound: "compound", +} + +var ReactivePartTableColumns = struct { + ID string + Accession string + Name string + HTMLName string + Compound string +}{ + ID: "reactive_part.id", + Accession: "reactive_part.accession", + Name: "reactive_part.name", + HTMLName: "reactive_part.html_name", + Compound: "reactive_part.compound", +} + +// Generated where + +var ReactivePartWhere = struct { + ID whereHelpernull_Int64 + Accession whereHelpernull_String + Name whereHelpernull_String + HTMLName whereHelpernull_String + Compound whereHelperstring +}{ + ID: whereHelpernull_Int64{field: "\"reactive_part\".\"id\""}, + Accession: whereHelpernull_String{field: 
"\"reactive_part\".\"accession\""}, + Name: whereHelpernull_String{field: "\"reactive_part\".\"name\""}, + HTMLName: whereHelpernull_String{field: "\"reactive_part\".\"html_name\""}, + Compound: whereHelperstring{field: "\"reactive_part\".\"compound\""}, +} + +// ReactivePartRels is where relationship names are stored. +var ReactivePartRels = struct { + ReactivePartCompound string +}{ + ReactivePartCompound: "ReactivePartCompound", +} + +// reactivePartR is where relationships are stored. +type reactivePartR struct { + ReactivePartCompound *Compound `boil:"ReactivePartCompound" json:"ReactivePartCompound" toml:"ReactivePartCompound" yaml:"ReactivePartCompound"` +} + +// NewStruct creates a new relationship struct +func (*reactivePartR) NewStruct() *reactivePartR { + return &reactivePartR{} +} + +func (r *reactivePartR) GetReactivePartCompound() *Compound { + if r == nil { + return nil + } + return r.ReactivePartCompound +} + +// reactivePartL is where Load methods for each relationship are stored. +type reactivePartL struct{} + +var ( + reactivePartAllColumns = []string{"id", "accession", "name", "html_name", "compound"} + reactivePartColumnsWithoutDefault = []string{"compound"} + reactivePartColumnsWithDefault = []string{"id", "accession", "name", "html_name"} + reactivePartPrimaryKeyColumns = []string{"accession"} + reactivePartGeneratedColumns = []string{} +) + +type ( + // ReactivePartSlice is an alias for a slice of pointers to ReactivePart. + // This should almost always be used instead of []ReactivePart. 
+ ReactivePartSlice []*ReactivePart + // ReactivePartHook is the signature for custom ReactivePart hook methods + ReactivePartHook func(context.Context, boil.ContextExecutor, *ReactivePart) error + + reactivePartQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + reactivePartType = reflect.TypeOf(&ReactivePart{}) + reactivePartMapping = queries.MakeStructMapping(reactivePartType) + reactivePartPrimaryKeyMapping, _ = queries.BindMapping(reactivePartType, reactivePartMapping, reactivePartPrimaryKeyColumns) + reactivePartInsertCacheMut sync.RWMutex + reactivePartInsertCache = make(map[string]insertCache) + reactivePartUpdateCacheMut sync.RWMutex + reactivePartUpdateCache = make(map[string]updateCache) + reactivePartUpsertCacheMut sync.RWMutex + reactivePartUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var reactivePartAfterSelectHooks []ReactivePartHook + +var reactivePartBeforeInsertHooks []ReactivePartHook +var reactivePartAfterInsertHooks []ReactivePartHook + +var reactivePartBeforeUpdateHooks []ReactivePartHook +var reactivePartAfterUpdateHooks []ReactivePartHook + +var reactivePartBeforeDeleteHooks []ReactivePartHook +var reactivePartAfterDeleteHooks []ReactivePartHook + +var reactivePartBeforeUpsertHooks []ReactivePartHook +var reactivePartAfterUpsertHooks []ReactivePartHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *ReactivePart) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactivePartAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. 
+func (o *ReactivePart) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactivePartBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *ReactivePart) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactivePartAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *ReactivePart) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactivePartBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ReactivePart) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactivePartAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *ReactivePart) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactivePartBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. 
+func (o *ReactivePart) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactivePartAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ReactivePart) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactivePartBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *ReactivePart) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range reactivePartAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddReactivePartHook registers your hook function for all future operations. 
+func AddReactivePartHook(hookPoint boil.HookPoint, reactivePartHook ReactivePartHook) { + switch hookPoint { + case boil.AfterSelectHook: + reactivePartAfterSelectHooks = append(reactivePartAfterSelectHooks, reactivePartHook) + case boil.BeforeInsertHook: + reactivePartBeforeInsertHooks = append(reactivePartBeforeInsertHooks, reactivePartHook) + case boil.AfterInsertHook: + reactivePartAfterInsertHooks = append(reactivePartAfterInsertHooks, reactivePartHook) + case boil.BeforeUpdateHook: + reactivePartBeforeUpdateHooks = append(reactivePartBeforeUpdateHooks, reactivePartHook) + case boil.AfterUpdateHook: + reactivePartAfterUpdateHooks = append(reactivePartAfterUpdateHooks, reactivePartHook) + case boil.BeforeDeleteHook: + reactivePartBeforeDeleteHooks = append(reactivePartBeforeDeleteHooks, reactivePartHook) + case boil.AfterDeleteHook: + reactivePartAfterDeleteHooks = append(reactivePartAfterDeleteHooks, reactivePartHook) + case boil.BeforeUpsertHook: + reactivePartBeforeUpsertHooks = append(reactivePartBeforeUpsertHooks, reactivePartHook) + case boil.AfterUpsertHook: + reactivePartAfterUpsertHooks = append(reactivePartAfterUpsertHooks, reactivePartHook) + } +} + +// One returns a single reactivePart record from the query. +func (q reactivePartQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ReactivePart, error) { + o := &ReactivePart{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for reactive_part") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ReactivePart records from the query. 
+func (q reactivePartQuery) All(ctx context.Context, exec boil.ContextExecutor) (ReactivePartSlice, error) { + var o []*ReactivePart + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ReactivePart slice") + } + + if len(reactivePartAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ReactivePart records in the query. +func (q reactivePartQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count reactive_part rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q reactivePartQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if reactive_part exists") + } + + return count > 0, nil +} + +// ReactivePartCompound pointed to by the foreign key. +func (o *ReactivePart) ReactivePartCompound(mods ...qm.QueryMod) compoundQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"accession\" = ?", o.Compound), + } + + queryMods = append(queryMods, mods...) + + return Compounds(queryMods...) +} + +// LoadReactivePartCompound allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (reactivePartL) LoadReactivePartCompound(ctx context.Context, e boil.ContextExecutor, singular bool, maybeReactivePart interface{}, mods queries.Applicator) error { + var slice []*ReactivePart + var object *ReactivePart + + if singular { + object = maybeReactivePart.(*ReactivePart) + } else { + slice = *maybeReactivePart.(*[]*ReactivePart) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &reactivePartR{} + } + if !queries.IsNil(object.Compound) { + args = append(args, object.Compound) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &reactivePartR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Compound) { + continue Outer + } + } + + if !queries.IsNil(obj.Compound) { + args = append(args, obj.Compound) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`compound`), + qm.WhereIn(`compound.accession in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Compound") + } + + var resultSlice []*Compound + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Compound") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for compound") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound") + } + + if len(reactivePartAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.ReactivePartCompound = foreign + if foreign.R == nil { + foreign.R = &compoundR{} + } + foreign.R.ReactiveParts = append(foreign.R.ReactiveParts, 
object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.Compound, foreign.Accession) { + local.R.ReactivePartCompound = foreign + if foreign.R == nil { + foreign.R = &compoundR{} + } + foreign.R.ReactiveParts = append(foreign.R.ReactiveParts, local) + break + } + } + } + + return nil +} + +// SetReactivePartCompound of the reactivePart to the related item. +// Sets o.R.ReactivePartCompound to related. +// Adds o to related.R.ReactiveParts. +func (o *ReactivePart) SetReactivePartCompound(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Compound) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"reactive_part\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"compound"}), + strmangle.WhereClause("\"", "\"", 0, reactivePartPrimaryKeyColumns), + ) + values := []interface{}{related.Accession, o.Accession} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.Compound, related.Accession) + if o.R == nil { + o.R = &reactivePartR{ + ReactivePartCompound: related, + } + } else { + o.R.ReactivePartCompound = related + } + + if related.R == nil { + related.R = &compoundR{ + ReactiveParts: ReactivePartSlice{o}, + } + } else { + related.R.ReactiveParts = append(related.R.ReactiveParts, o) + } + + return nil +} + +// ReactiveParts retrieves all the records using an executor. +func ReactiveParts(mods ...qm.QueryMod) reactivePartQuery { + mods = append(mods, qm.From("\"reactive_part\"")) + q := NewQuery(mods...) 
+ if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"reactive_part\".*"}) + } + + return reactivePartQuery{q} +} + +// FindReactivePart retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindReactivePart(ctx context.Context, exec boil.ContextExecutor, accession null.String, selectCols ...string) (*ReactivePart, error) { + reactivePartObj := &ReactivePart{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"reactive_part\" where \"accession\"=?", sel, + ) + + q := queries.Raw(query, accession) + + err := q.Bind(ctx, exec, reactivePartObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from reactive_part") + } + + if err = reactivePartObj.doAfterSelectHooks(ctx, exec); err != nil { + return reactivePartObj, err + } + + return reactivePartObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *ReactivePart) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no reactive_part provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(reactivePartColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + reactivePartInsertCacheMut.RLock() + cache, cached := reactivePartInsertCache[key] + reactivePartInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + reactivePartAllColumns, + reactivePartColumnsWithDefault, + reactivePartColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(reactivePartType, reactivePartMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(reactivePartType, reactivePartMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"reactive_part\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"reactive_part\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into reactive_part") + } + + if !cached { + reactivePartInsertCacheMut.Lock() + reactivePartInsertCache[key] = cache + reactivePartInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ReactivePart. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *ReactivePart) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + reactivePartUpdateCacheMut.RLock() + cache, cached := reactivePartUpdateCache[key] + reactivePartUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + reactivePartAllColumns, + reactivePartPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update reactive_part, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"reactive_part\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, reactivePartPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(reactivePartType, reactivePartMapping, append(wl, reactivePartPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update reactive_part row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for reactive_part") + } + + if !cached { + reactivePartUpdateCacheMut.Lock() + reactivePartUpdateCache[key] = cache + reactivePartUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q reactivePartQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for reactive_part") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for reactive_part") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ReactivePartSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactivePartPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"reactive_part\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactivePartPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in reactivePart slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all reactivePart") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *ReactivePart) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no reactive_part provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(reactivePartColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + 
reactivePartUpsertCacheMut.RLock() + cache, cached := reactivePartUpsertCache[key] + reactivePartUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + reactivePartAllColumns, + reactivePartColumnsWithDefault, + reactivePartColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + reactivePartAllColumns, + reactivePartPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert reactive_part, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(reactivePartPrimaryKeyColumns)) + copy(conflict, reactivePartPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"reactive_part\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(reactivePartType, reactivePartMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(reactivePartType, reactivePartMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert reactive_part") + } + + if !cached { + reactivePartUpsertCacheMut.Lock() + reactivePartUpsertCache[key] = cache + reactivePartUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ReactivePart record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *ReactivePart) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ReactivePart provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), reactivePartPrimaryKeyMapping) + sql := "DELETE FROM \"reactive_part\" WHERE \"accession\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from reactive_part") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for reactive_part") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q reactivePartQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no reactivePartQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from reactive_part") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for reactive_part") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o ReactivePartSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(reactivePartBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactivePartPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"reactive_part\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactivePartPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from reactivePart slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for reactive_part") + } + + if len(reactivePartAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ReactivePart) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindReactivePart(ctx, exec, o.Accession) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *ReactivePartSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ReactivePartSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), reactivePartPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"reactive_part\".* FROM \"reactive_part\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, reactivePartPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ReactivePartSlice") + } + + *o = slice + + return nil +} + +// ReactivePartExists checks if the ReactivePart row exists. +func ReactivePartExists(ctx context.Context, exec boil.ContextExecutor, accession null.String) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"reactive_part\" where \"accession\"=? 
limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, accession) + } + row := exec.QueryRowContext(ctx, sql, accession) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if reactive_part exists") + } + + return exists, nil +} diff --git a/models/relationship_type.go b/models/relationship_type.go new file mode 100644 index 0000000..fba3c4f --- /dev/null +++ b/models/relationship_type.go @@ -0,0 +1,1140 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// RelationshipType is an object representing the database table. 
+type RelationshipType struct { + RelationshipType string `boil:"relationship_type" json:"relationship_type" toml:"relationship_type" yaml:"relationship_type"` + RelationshipDesc null.String `boil:"relationship_desc" json:"relationship_desc,omitempty" toml:"relationship_desc" yaml:"relationship_desc,omitempty"` + + R *relationshipTypeR `boil:"-" json:"-" toml:"-" yaml:"-"` + L relationshipTypeL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var RelationshipTypeColumns = struct { + RelationshipType string + RelationshipDesc string +}{ + RelationshipType: "relationship_type", + RelationshipDesc: "relationship_desc", +} + +var RelationshipTypeTableColumns = struct { + RelationshipType string + RelationshipDesc string +}{ + RelationshipType: "relationship_type.relationship_type", + RelationshipDesc: "relationship_type.relationship_desc", +} + +// Generated where + +var RelationshipTypeWhere = struct { + RelationshipType whereHelperstring + RelationshipDesc whereHelpernull_String +}{ + RelationshipType: whereHelperstring{field: "\"relationship_type\".\"relationship_type\""}, + RelationshipDesc: whereHelpernull_String{field: "\"relationship_type\".\"relationship_desc\""}, +} + +// RelationshipTypeRels is where relationship names are stored. +var RelationshipTypeRels = struct { + Assays string +}{ + Assays: "Assays", +} + +// relationshipTypeR is where relationships are stored. +type relationshipTypeR struct { + Assays AssaySlice `boil:"Assays" json:"Assays" toml:"Assays" yaml:"Assays"` +} + +// NewStruct creates a new relationship struct +func (*relationshipTypeR) NewStruct() *relationshipTypeR { + return &relationshipTypeR{} +} + +func (r *relationshipTypeR) GetAssays() AssaySlice { + if r == nil { + return nil + } + return r.Assays +} + +// relationshipTypeL is where Load methods for each relationship are stored. 
type relationshipTypeL struct{}

// Column lists consumed by the generated query builders below.
var (
	relationshipTypeAllColumns            = []string{"relationship_type", "relationship_desc"}
	relationshipTypeColumnsWithoutDefault = []string{"relationship_type"}
	relationshipTypeColumnsWithDefault    = []string{"relationship_desc"}
	relationshipTypePrimaryKeyColumns     = []string{"relationship_type"}
	relationshipTypeGeneratedColumns      = []string{}
)

type (
	// RelationshipTypeSlice is an alias for a slice of pointers to RelationshipType.
	// This should almost always be used instead of []RelationshipType.
	RelationshipTypeSlice []*RelationshipType
	// RelationshipTypeHook is the signature for custom RelationshipType hook methods
	RelationshipTypeHook func(context.Context, boil.ContextExecutor, *RelationshipType) error

	relationshipTypeQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	relationshipTypeType                 = reflect.TypeOf(&RelationshipType{})
	relationshipTypeMapping              = queries.MakeStructMapping(relationshipTypeType)
	relationshipTypePrimaryKeyMapping, _ = queries.BindMapping(relationshipTypeType, relationshipTypeMapping, relationshipTypePrimaryKeyColumns)
	relationshipTypeInsertCacheMut       sync.RWMutex
	relationshipTypeInsertCache          = make(map[string]insertCache)
	relationshipTypeUpdateCacheMut       sync.RWMutex
	relationshipTypeUpdateCache          = make(map[string]updateCache)
	relationshipTypeUpsertCacheMut       sync.RWMutex
	relationshipTypeUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Hook registries, appended to by AddRelationshipTypeHook and executed by
// the do*Hooks helpers below.
var relationshipTypeAfterSelectHooks []RelationshipTypeHook

var relationshipTypeBeforeInsertHooks []RelationshipTypeHook
var relationshipTypeAfterInsertHooks []RelationshipTypeHook

var relationshipTypeBeforeUpdateHooks []RelationshipTypeHook
var relationshipTypeAfterUpdateHooks []RelationshipTypeHook

var relationshipTypeBeforeDeleteHooks []RelationshipTypeHook
var relationshipTypeAfterDeleteHooks []RelationshipTypeHook

var relationshipTypeBeforeUpsertHooks []RelationshipTypeHook
var relationshipTypeAfterUpsertHooks []RelationshipTypeHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *RelationshipType) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range relationshipTypeAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *RelationshipType) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range relationshipTypeBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *RelationshipType) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range relationshipTypeAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
// NOTE(review): the five hook runners below are generator-stamped copies of
// one pattern; each short-circuits when hooks are skipped via the context.
func (o *RelationshipType) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range relationshipTypeBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *RelationshipType) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range relationshipTypeAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *RelationshipType) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range relationshipTypeBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *RelationshipType) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range relationshipTypeAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *RelationshipType) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range relationshipTypeBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *RelationshipType) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range relationshipTypeAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddRelationshipTypeHook registers your hook function for all future operations.
//
// NOTE(review): registration appends to package-level slices with no locking;
// register hooks at startup, before concurrent model use.
func AddRelationshipTypeHook(hookPoint boil.HookPoint, relationshipTypeHook RelationshipTypeHook) {
	switch hookPoint {
	case boil.AfterSelectHook:
		relationshipTypeAfterSelectHooks = append(relationshipTypeAfterSelectHooks, relationshipTypeHook)
	case boil.BeforeInsertHook:
		relationshipTypeBeforeInsertHooks = append(relationshipTypeBeforeInsertHooks, relationshipTypeHook)
	case boil.AfterInsertHook:
		relationshipTypeAfterInsertHooks = append(relationshipTypeAfterInsertHooks, relationshipTypeHook)
	case boil.BeforeUpdateHook:
		relationshipTypeBeforeUpdateHooks = append(relationshipTypeBeforeUpdateHooks, relationshipTypeHook)
	case boil.AfterUpdateHook:
		relationshipTypeAfterUpdateHooks = append(relationshipTypeAfterUpdateHooks, relationshipTypeHook)
	case boil.BeforeDeleteHook:
		relationshipTypeBeforeDeleteHooks = append(relationshipTypeBeforeDeleteHooks, relationshipTypeHook)
	case boil.AfterDeleteHook:
		relationshipTypeAfterDeleteHooks = append(relationshipTypeAfterDeleteHooks, relationshipTypeHook)
	case boil.BeforeUpsertHook:
		relationshipTypeBeforeUpsertHooks = append(relationshipTypeBeforeUpsertHooks, relationshipTypeHook)
	case boil.AfterUpsertHook:
		relationshipTypeAfterUpsertHooks = append(relationshipTypeAfterUpsertHooks, relationshipTypeHook)
	}
}

// One returns a single relationshipType record from the query.
// Passes sql.ErrNoRows through untouched so callers can errors.Is on it.
func (q relationshipTypeQuery) One(ctx context.Context, exec boil.ContextExecutor) (*RelationshipType, error) {
	o := &RelationshipType{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for relationship_type")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all RelationshipType records from the query.
func (q relationshipTypeQuery) All(ctx context.Context, exec boil.ContextExecutor) (RelationshipTypeSlice, error) {
	var o []*RelationshipType

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to RelationshipType slice")
	}

	// Only walk the result set when at least one after-select hook is registered.
	if len(relationshipTypeAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all RelationshipType records in the query.
func (q relationshipTypeQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Replace any SELECT list with COUNT(*) before executing.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count relationship_type rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
// LIMIT 1 bounds the count so existence checks stay cheap.
func (q relationshipTypeQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if relationship_type exists")
	}

	return count > 0, nil
}

// Assays retrieves all the assay's Assays with an executor.
func (o *RelationshipType) Assays(mods ...qm.QueryMod) assayQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	// Caller-supplied mods come first; the FK filter is always appended.
	queryMods = append(queryMods,
		qm.Where("\"assays\".\"relationship_type\"=?", o.RelationshipType),
	)

	return Assays(queryMods...)
}

// LoadAssays allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (relationshipTypeL) LoadAssays(ctx context.Context, e boil.ContextExecutor, singular bool, maybeRelationshipType interface{}, mods queries.Applicator) error {
	var slice []*RelationshipType
	var object *RelationshipType

	if singular {
		object = maybeRelationshipType.(*RelationshipType)
	} else {
		slice = *maybeRelationshipType.(*[]*RelationshipType)
	}

	// Collect distinct key values, initializing R containers as we go.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &relationshipTypeR{}
		}
		args = append(args, object.RelationshipType)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &relationshipTypeR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.RelationshipType) {
					continue Outer
				}
			}

			args = append(args, obj.RelationshipType)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`assays`),
		qm.WhereIn(`assays.relationship_type in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load assays")
	}

	var resultSlice []*Assay
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice assays")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on assays")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays")
	}

	if len(assayAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.Assays = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &assayR{}
			}
			foreign.R.AssayRelationshipType = object
		}
		return nil
	}

	// Distribute the loaded rows back onto their parents and set the
	// back-reference on each child.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.RelationshipType, foreign.RelationshipType) {
				local.R.Assays = append(local.R.Assays, foreign)
				if foreign.R == nil {
					foreign.R = &assayR{}
				}
				foreign.R.AssayRelationshipType = local
				break
			}
		}
	}

	return nil
}

// AddAssays adds the given related objects to the existing relationships
// of the relationship_type, optionally inserting them as new records.
// Appends related to o.R.Assays.
// Sets related.R.AssayRelationshipType appropriately.
func (o *RelationshipType) AddAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error {
	var err error
	for _, rel := range related {
		if insert {
			// New record: stamp the FK first, then insert.
			queries.Assign(&rel.RelationshipType, o.RelationshipType)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing record: point its FK at this relationship_type.
			updateQuery := fmt.Sprintf(
				"UPDATE \"assays\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"relationship_type"}),
				strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns),
			)
			values := []interface{}{o.RelationshipType, rel.AssayID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.RelationshipType, o.RelationshipType)
		}
	}

	if o.R == nil {
		o.R = &relationshipTypeR{
			Assays: related,
		}
	} else {
		o.R.Assays = append(o.R.Assays, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &assayR{
				AssayRelationshipType: o,
			}
		} else {
			rel.R.AssayRelationshipType = o
		}
	}
	return nil
}

// SetAssays removes all previously related items of the
// relationship_type replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.AssayRelationshipType's Assays accordingly.
// Replaces o.R.Assays with related.
// Sets related.R.AssayRelationshipType's Assays accordingly.
func (o *RelationshipType) SetAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error {
	// Detach all current children by nulling their FK in one statement.
	query := "update \"assays\" set \"relationship_type\" = null where \"relationship_type\" = ?"
	values := []interface{}{o.RelationshipType}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	if o.R != nil {
		for _, rel := range o.R.Assays {
			queries.SetScanner(&rel.RelationshipType, nil)
			if rel.R == nil {
				continue
			}

			rel.R.AssayRelationshipType = nil
		}
		o.R.Assays = nil
	}

	return o.AddAssays(ctx, exec, insert, related...)
}

// RemoveAssays relationships from objects passed in.
// Removes related items from R.Assays (uses pointer comparison, removal does not keep order)
// Sets related.R.AssayRelationshipType.
func (o *RelationshipType) RemoveAssays(ctx context.Context, exec boil.ContextExecutor, related ...*Assay) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		queries.SetScanner(&rel.RelationshipType, nil)
		if rel.R != nil {
			rel.R.AssayRelationshipType = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("relationship_type")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	for _, rel := range related {
		for i, ri := range o.R.Assays {
			if rel != ri {
				continue
			}

			// Swap-with-last removal: O(1) per item but does not keep order.
			ln := len(o.R.Assays)
			if ln > 1 && i < ln-1 {
				o.R.Assays[i] = o.R.Assays[ln-1]
			}
			o.R.Assays = o.R.Assays[:ln-1]
			break
		}
	}

	return nil
}

// RelationshipTypes retrieves all the records using an executor.
func RelationshipTypes(mods ...qm.QueryMod) relationshipTypeQuery {
	mods = append(mods, qm.From("\"relationship_type\""))
	q := NewQuery(mods...)
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"relationship_type\".*"})
	}

	return relationshipTypeQuery{q}
}

// FindRelationshipType retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
// Passes sql.ErrNoRows through untouched so callers can errors.Is on it.
func FindRelationshipType(ctx context.Context, exec boil.ContextExecutor, relationshipType string, selectCols ...string) (*RelationshipType, error) {
	relationshipTypeObj := &RelationshipType{}

	sel := "*"
	if len(selectCols) > 0 {
		// Quote the requested columns with the active dialect's quote runes.
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"relationship_type\" where \"relationship_type\"=?", sel,
	)

	q := queries.Raw(query, relationshipType)

	err := q.Bind(ctx, exec, relationshipTypeObj)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from relationship_type")
	}

	if err = relationshipTypeObj.doAfterSelectHooks(ctx, exec); err != nil {
		return relationshipTypeObj, err
	}

	return relationshipTypeObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *RelationshipType) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no relationship_type provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(relationshipTypeColumnsWithDefault, o)

	// Query text and mappings are cached per (columns, nzDefaults) key.
	key := makeCacheKey(columns, nzDefaults)
	relationshipTypeInsertCacheMut.RLock()
	cache, cached := relationshipTypeInsertCache[key]
	relationshipTypeInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			relationshipTypeAllColumns,
			relationshipTypeColumnsWithDefault,
			relationshipTypeColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(relationshipTypeType, relationshipTypeMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(relationshipTypeType, relationshipTypeMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"relationship_type\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"relationship_type\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		// Splice the (possibly empty) OUTPUT/RETURNING fragments into the template.
		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into relationship_type")
	}

	if !cached {
		relationshipTypeInsertCacheMut.Lock()
		relationshipTypeInsertCache[key] = cache
		relationshipTypeInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the RelationshipType.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *RelationshipType) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	relationshipTypeUpdateCacheMut.RLock()
	cache, cached := relationshipTypeUpdateCache[key]
	relationshipTypeUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			relationshipTypeAllColumns,
			relationshipTypePrimaryKeyColumns,
		)

		if !columns.IsWhitelist() {
			// Never auto-update created_at unless explicitly whitelisted.
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update relationship_type, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"relationship_type\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, relationshipTypePrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(relationshipTypeType, relationshipTypeMapping, append(wl, relationshipTypePrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update relationship_type row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for relationship_type")
	}

	if !cached {
		relationshipTypeUpdateCacheMut.Lock()
		relationshipTypeUpdateCache[key] = cache
		relationshipTypeUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q relationshipTypeQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for relationship_type")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for relationship_type")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o RelationshipTypeSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), relationshipTypePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"relationship_type\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, relationshipTypePrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in relationshipType slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all relationshipType")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *RelationshipType) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no relationship_type provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(relationshipTypeColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	relationshipTypeUpsertCacheMut.RLock()
	cache, cached := relationshipTypeUpsertCache[key]
	relationshipTypeUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			relationshipTypeAllColumns,
			relationshipTypeColumnsWithDefault,
			relationshipTypeColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			relationshipTypeAllColumns,
			relationshipTypePrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert relationship_type, could not build update column list")
		}

		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(relationshipTypePrimaryKeyColumns))
			copy(conflict, relationshipTypePrimaryKeyColumns)
		}
		// NOTE(review): this model was generated for the SQLite dialect
		// (buildUpsertQuerySQLite); the "Postgres" comment below is a
		// generator artifact.
		cache.query = buildUpsertQuerySQLite(dialect, "\"relationship_type\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(relationshipTypeType, relationshipTypeMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(relationshipTypeType, relationshipTypeMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert relationship_type")
	}

	if !cached {
		relationshipTypeUpsertCacheMut.Lock()
		relationshipTypeUpsertCache[key] = cache
		relationshipTypeUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single RelationshipType record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *RelationshipType) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no RelationshipType provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), relationshipTypePrimaryKeyMapping)
	sql := "DELETE FROM \"relationship_type\" WHERE \"relationship_type\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from relationship_type")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for relationship_type")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q relationshipTypeQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no relationshipTypeQuery provided for delete all")
	}

	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from relationship_type")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for relationship_type")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o RelationshipTypeSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	if len(relationshipTypeBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// One DELETE with a repeated primary-key WHERE clause covers the slice.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), relationshipTypePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"relationship_type\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, relationshipTypePrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from relationshipType slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for relationship_type")
	}

	if len(relationshipTypeAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *RelationshipType) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindRelationshipType(ctx, exec, o.RelationshipType)
	if err != nil {
		return err
	}

	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *RelationshipTypeSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := RelationshipTypeSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), relationshipTypePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"relationship_type\".* FROM \"relationship_type\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, relationshipTypePrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in RelationshipTypeSlice")
	}

	*o = slice

	return nil
}

// RelationshipTypeExists checks if the RelationshipType row exists.
+func RelationshipTypeExists(ctx context.Context, exec boil.ContextExecutor, relationshipType string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"relationship_type\" where \"relationship_type\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, relationshipType) + } + row := exec.QueryRowContext(ctx, sql, relationshipType) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if relationship_type exists") + } + + return exists, nil +} diff --git a/models/research_companies.go b/models/research_companies.go new file mode 100644 index 0000000..5779da0 --- /dev/null +++ b/models/research_companies.go @@ -0,0 +1,1121 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// ResearchCompany is an object representing the database table. 
// NOTE(review): generated by SQLBoiler — manual edits will be lost on
// regeneration.
type ResearchCompany struct {
	CoStemID        int64       `boil:"co_stem_id" json:"co_stem_id" toml:"co_stem_id" yaml:"co_stem_id"`
	ResStemID       null.Int64  `boil:"res_stem_id" json:"res_stem_id,omitempty" toml:"res_stem_id" yaml:"res_stem_id,omitempty"`
	Company         null.String `boil:"company" json:"company,omitempty" toml:"company" yaml:"company,omitempty"`
	Country         null.String `boil:"country" json:"country,omitempty" toml:"country" yaml:"country,omitempty"`
	PreviousCompany null.String `boil:"previous_company" json:"previous_company,omitempty" toml:"previous_company" yaml:"previous_company,omitempty"`

	// R holds eager-loaded relationships; L exposes their Load methods.
	R *researchCompanyR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L researchCompanyL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// ResearchCompanyColumns maps struct fields to their bare column names.
var ResearchCompanyColumns = struct {
	CoStemID        string
	ResStemID       string
	Company         string
	Country         string
	PreviousCompany string
}{
	CoStemID:        "co_stem_id",
	ResStemID:       "res_stem_id",
	Company:         "company",
	Country:         "country",
	PreviousCompany: "previous_company",
}

// ResearchCompanyTableColumns maps struct fields to table-qualified
// column names, for use in joins.
var ResearchCompanyTableColumns = struct {
	CoStemID        string
	ResStemID       string
	Company         string
	Country         string
	PreviousCompany string
}{
	CoStemID:        "research_companies.co_stem_id",
	ResStemID:       "research_companies.res_stem_id",
	Company:         "research_companies.company",
	Country:         "research_companies.country",
	PreviousCompany: "research_companies.previous_company",
}

// Generated where

// ResearchCompanyWhere provides typed query-mod helpers for building
// WHERE clauses against each column.
var ResearchCompanyWhere = struct {
	CoStemID        whereHelperint64
	ResStemID       whereHelpernull_Int64
	Company         whereHelpernull_String
	Country         whereHelpernull_String
	PreviousCompany whereHelpernull_String
}{
	CoStemID:        whereHelperint64{field: "\"research_companies\".\"co_stem_id\""},
	ResStemID:       whereHelpernull_Int64{field: "\"research_companies\".\"res_stem_id\""},
	Company:         whereHelpernull_String{field: "\"research_companies\".\"company\""},
	Country:         whereHelpernull_String{field: "\"research_companies\".\"country\""},
	PreviousCompany: whereHelpernull_String{field: "\"research_companies\".\"previous_company\""},
}

// ResearchCompanyRels is where relationship names are stored.
var ResearchCompanyRels = struct {
	ResStem string
}{
	ResStem: "ResStem",
}

// researchCompanyR is where relationships are stored.
type researchCompanyR struct {
	ResStem *ResearchStem `boil:"ResStem" json:"ResStem" toml:"ResStem" yaml:"ResStem"`
}

// NewStruct creates a new relationship struct
func (*researchCompanyR) NewStruct() *researchCompanyR {
	return &researchCompanyR{}
}

// GetResStem returns the eager-loaded ResStem; safe to call on a nil receiver.
func (r *researchCompanyR) GetResStem() *ResearchStem {
	if r == nil {
		return nil
	}
	return r.ResStem
}

// researchCompanyL is where Load methods for each relationship are stored.
type researchCompanyL struct{}

// Column lists consumed by the generated query builders.
var (
	researchCompanyAllColumns            = []string{"co_stem_id", "res_stem_id", "company", "country", "previous_company"}
	researchCompanyColumnsWithoutDefault = []string{"co_stem_id"}
	researchCompanyColumnsWithDefault    = []string{"res_stem_id", "company", "country", "previous_company"}
	researchCompanyPrimaryKeyColumns     = []string{"co_stem_id"}
	researchCompanyGeneratedColumns      = []string{}
)

type (
	// ResearchCompanySlice is an alias for a slice of pointers to ResearchCompany.
	// This should almost always be used instead of []ResearchCompany.
	ResearchCompanySlice []*ResearchCompany
	// ResearchCompanyHook is the signature for custom ResearchCompany hook methods
	ResearchCompanyHook func(context.Context, boil.ContextExecutor, *ResearchCompany) error

	researchCompanyQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	researchCompanyType                 = reflect.TypeOf(&ResearchCompany{})
	researchCompanyMapping              = queries.MakeStructMapping(researchCompanyType)
	researchCompanyPrimaryKeyMapping, _ = queries.BindMapping(researchCompanyType, researchCompanyMapping, researchCompanyPrimaryKeyColumns)
	researchCompanyInsertCacheMut       sync.RWMutex
	researchCompanyInsertCache          = make(map[string]insertCache)
	researchCompanyUpdateCacheMut       sync.RWMutex
	researchCompanyUpdateCache          = make(map[string]updateCache)
	researchCompanyUpsertCacheMut       sync.RWMutex
	researchCompanyUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Hook registries, executed by the do*Hooks helpers.
var researchCompanyAfterSelectHooks []ResearchCompanyHook

var researchCompanyBeforeInsertHooks []ResearchCompanyHook
var researchCompanyAfterInsertHooks []ResearchCompanyHook

var researchCompanyBeforeUpdateHooks []ResearchCompanyHook
var researchCompanyAfterUpdateHooks []ResearchCompanyHook

var researchCompanyBeforeDeleteHooks []ResearchCompanyHook
var researchCompanyAfterDeleteHooks []ResearchCompanyHook

var researchCompanyBeforeUpsertHooks []ResearchCompanyHook
var researchCompanyAfterUpsertHooks []ResearchCompanyHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *ResearchCompany) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be suppressed per call through the context.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	// Run registered hooks in registration order; the first error aborts.
	for _, hook := range researchCompanyAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *ResearchCompany) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range researchCompanyBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *ResearchCompany) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range researchCompanyAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *ResearchCompany) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range researchCompanyBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *ResearchCompany) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range researchCompanyAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *ResearchCompany) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be suppressed per call through the context.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	// Run registered hooks in registration order; the first error aborts.
	for _, hook := range researchCompanyBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *ResearchCompany) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range researchCompanyAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *ResearchCompany) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range researchCompanyBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *ResearchCompany) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range researchCompanyAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddResearchCompanyHook registers your hook function for all future operations.
func AddResearchCompanyHook(hookPoint boil.HookPoint, researchCompanyHook ResearchCompanyHook) {
	// Appends the hook to the package-level slice for the given hook point.
	// NOTE(review): registration is not mutex-guarded; register hooks during
	// startup before concurrent use — TODO confirm against callers.
	switch hookPoint {
	case boil.AfterSelectHook:
		researchCompanyAfterSelectHooks = append(researchCompanyAfterSelectHooks, researchCompanyHook)
	case boil.BeforeInsertHook:
		researchCompanyBeforeInsertHooks = append(researchCompanyBeforeInsertHooks, researchCompanyHook)
	case boil.AfterInsertHook:
		researchCompanyAfterInsertHooks = append(researchCompanyAfterInsertHooks, researchCompanyHook)
	case boil.BeforeUpdateHook:
		researchCompanyBeforeUpdateHooks = append(researchCompanyBeforeUpdateHooks, researchCompanyHook)
	case boil.AfterUpdateHook:
		researchCompanyAfterUpdateHooks = append(researchCompanyAfterUpdateHooks, researchCompanyHook)
	case boil.BeforeDeleteHook:
		researchCompanyBeforeDeleteHooks = append(researchCompanyBeforeDeleteHooks, researchCompanyHook)
	case boil.AfterDeleteHook:
		researchCompanyAfterDeleteHooks = append(researchCompanyAfterDeleteHooks, researchCompanyHook)
	case boil.BeforeUpsertHook:
		researchCompanyBeforeUpsertHooks = append(researchCompanyBeforeUpsertHooks, researchCompanyHook)
	case boil.AfterUpsertHook:
		researchCompanyAfterUpsertHooks = append(researchCompanyAfterUpsertHooks, researchCompanyHook)
	}
}

// One returns a single researchCompany record from the query.
func (q researchCompanyQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ResearchCompany, error) {
	o := &ResearchCompany{}

	// Constrain the query to one row before binding.
	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			// Return the sentinel unwrapped so callers can compare directly.
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for research_companies")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all ResearchCompany records from the query.
func (q researchCompanyQuery) All(ctx context.Context, exec boil.ContextExecutor) (ResearchCompanySlice, error) {
	var o []*ResearchCompany

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to ResearchCompany slice")
	}

	// Only iterate when hooks are registered, to avoid needless per-row work.
	if len(researchCompanyAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all ResearchCompany records in the query.
func (q researchCompanyQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Replace the select list with COUNT(*) while keeping the query's filters.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count research_companies rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q researchCompanyQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	// COUNT with LIMIT 1: any count > 0 means at least one matching row.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if research_companies exists")
	}

	return count > 0, nil
}

// ResStem pointed to by the foreign key.
// Returns a query (not a record); callers chain One/All/etc. to execute it.
func (o *ResearchCompany) ResStem(mods ...qm.QueryMod) researchStemQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"res_stem_id\" = ?", o.ResStemID),
	}

	queryMods = append(queryMods, mods...)

	return ResearchStems(queryMods...)
}

// LoadResStem allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
+func (researchCompanyL) LoadResStem(ctx context.Context, e boil.ContextExecutor, singular bool, maybeResearchCompany interface{}, mods queries.Applicator) error { + var slice []*ResearchCompany + var object *ResearchCompany + + if singular { + object = maybeResearchCompany.(*ResearchCompany) + } else { + slice = *maybeResearchCompany.(*[]*ResearchCompany) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &researchCompanyR{} + } + if !queries.IsNil(object.ResStemID) { + args = append(args, object.ResStemID) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &researchCompanyR{} + } + + for _, a := range args { + if queries.Equal(a, obj.ResStemID) { + continue Outer + } + } + + if !queries.IsNil(obj.ResStemID) { + args = append(args, obj.ResStemID) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`research_stem`), + qm.WhereIn(`research_stem.res_stem_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ResearchStem") + } + + var resultSlice []*ResearchStem + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice ResearchStem") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for research_stem") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for research_stem") + } + + if len(researchCompanyAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.ResStem = foreign + if foreign.R == nil { + foreign.R = &researchStemR{} + } + 
foreign.R.ResStemResearchCompanies = append(foreign.R.ResStemResearchCompanies, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.ResStemID, foreign.ResStemID) { + local.R.ResStem = foreign + if foreign.R == nil { + foreign.R = &researchStemR{} + } + foreign.R.ResStemResearchCompanies = append(foreign.R.ResStemResearchCompanies, local) + break + } + } + } + + return nil +} + +// SetResStem of the researchCompany to the related item. +// Sets o.R.ResStem to related. +// Adds o to related.R.ResStemResearchCompanies. +func (o *ResearchCompany) SetResStem(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ResearchStem) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"research_companies\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"res_stem_id"}), + strmangle.WhereClause("\"", "\"", 0, researchCompanyPrimaryKeyColumns), + ) + values := []interface{}{related.ResStemID, o.CoStemID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.ResStemID, related.ResStemID) + if o.R == nil { + o.R = &researchCompanyR{ + ResStem: related, + } + } else { + o.R.ResStem = related + } + + if related.R == nil { + related.R = &researchStemR{ + ResStemResearchCompanies: ResearchCompanySlice{o}, + } + } else { + related.R.ResStemResearchCompanies = append(related.R.ResStemResearchCompanies, o) + } + + return nil +} + +// RemoveResStem relationship. +// Sets o.R.ResStem to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *ResearchCompany) RemoveResStem(ctx context.Context, exec boil.ContextExecutor, related *ResearchStem) error { + var err error + + queries.SetScanner(&o.ResStemID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("res_stem_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.ResStem = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.ResStemResearchCompanies { + if queries.Equal(o.ResStemID, ri.ResStemID) { + continue + } + + ln := len(related.R.ResStemResearchCompanies) + if ln > 1 && i < ln-1 { + related.R.ResStemResearchCompanies[i] = related.R.ResStemResearchCompanies[ln-1] + } + related.R.ResStemResearchCompanies = related.R.ResStemResearchCompanies[:ln-1] + break + } + return nil +} + +// ResearchCompanies retrieves all the records using an executor. +func ResearchCompanies(mods ...qm.QueryMod) researchCompanyQuery { + mods = append(mods, qm.From("\"research_companies\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"research_companies\".*"}) + } + + return researchCompanyQuery{q} +} + +// FindResearchCompany retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
func FindResearchCompany(ctx context.Context, exec boil.ContextExecutor, coStemID int64, selectCols ...string) (*ResearchCompany, error) {
	researchCompanyObj := &ResearchCompany{}

	// Default to all columns; otherwise quote the requested columns for the
	// configured dialect.
	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"research_companies\" where \"co_stem_id\"=?", sel,
	)

	q := queries.Raw(query, coStemID)

	err := q.Bind(ctx, exec, researchCompanyObj)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			// Return the sentinel unwrapped so callers can compare directly.
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from research_companies")
	}

	if err = researchCompanyObj.doAfterSelectHooks(ctx, exec); err != nil {
		return researchCompanyObj, err
	}

	return researchCompanyObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *ResearchCompany) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no research_companies provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	// Columns with DB defaults that hold non-zero values must be inserted
	// explicitly rather than left to the default.
	nzDefaults := queries.NonZeroDefaultSet(researchCompanyColumnsWithDefault, o)

	// The built query text and struct mappings are cached per column set.
	key := makeCacheKey(columns, nzDefaults)
	researchCompanyInsertCacheMut.RLock()
	cache, cached := researchCompanyInsertCache[key]
	researchCompanyInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			researchCompanyAllColumns,
			researchCompanyColumnsWithDefault,
			researchCompanyColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(researchCompanyType, researchCompanyMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(researchCompanyType, researchCompanyMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"research_companies\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"research_companies\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		// Ask the database to hand back columns it populated (defaults).
		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	// With a RETURNING clause, scan the generated values back into o.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into research_companies")
	}

	if !cached {
		researchCompanyInsertCacheMut.Lock()
		researchCompanyInsertCache[key] = cache
		researchCompanyInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the ResearchCompany.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *ResearchCompany) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	// The built UPDATE text and struct mapping are cached per column set.
	key := makeCacheKey(columns, nil)
	researchCompanyUpdateCacheMut.RLock()
	cache, cached := researchCompanyUpdateCache[key]
	researchCompanyUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			researchCompanyAllColumns,
			researchCompanyPrimaryKeyColumns,
		)

		// created_at is excluded from inferred column sets; an explicit
		// whitelist can still update it.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update research_companies, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"research_companies\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, researchCompanyPrimaryKeyColumns),
		)
		// Primary key values are appended after the SET values for the WHERE.
		cache.valueMapping, err = queries.BindMapping(researchCompanyType, researchCompanyMapping, append(wl, researchCompanyPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update research_companies row")
	}

	// Zero rows affected is not an error; callers decide how to treat it.
	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for research_companies")
	}

	if !cached {
		researchCompanyUpdateCacheMut.Lock()
		researchCompanyUpdateCache[key] = cache
		researchCompanyUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
+func (q researchCompanyQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for research_companies") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for research_companies") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ResearchCompanySlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), researchCompanyPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"research_companies\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, researchCompanyPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in researchCompany slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all researchCompany") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *ResearchCompany) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no research_companies provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(researchCompanyColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + researchCompanyUpsertCacheMut.RLock() + cache, cached := researchCompanyUpsertCache[key] + researchCompanyUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + researchCompanyAllColumns, + researchCompanyColumnsWithDefault, + researchCompanyColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + 
researchCompanyAllColumns, + researchCompanyPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert research_companies, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(researchCompanyPrimaryKeyColumns)) + copy(conflict, researchCompanyPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"research_companies\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(researchCompanyType, researchCompanyMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(researchCompanyType, researchCompanyMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert research_companies") + } + + if !cached { + researchCompanyUpsertCacheMut.Lock() + researchCompanyUpsertCache[key] = cache + researchCompanyUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ResearchCompany record with an executor. +// Delete will match against the primary key column to find the record to delete. 
func (o *ResearchCompany) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no ResearchCompany provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	// WHERE values come from the struct's primary key mapping (co_stem_id).
	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), researchCompanyPrimaryKeyMapping)
	sql := "DELETE FROM \"research_companies\" WHERE \"co_stem_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from research_companies")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for research_companies")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
// Note: query-level DeleteAll does not run per-row delete hooks.
func (q researchCompanyQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no researchCompanyQuery provided for delete all")
	}

	// Convert the built SELECT into a DELETE, keeping its filters.
	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from research_companies")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for research_companies")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o ResearchCompanySlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	if len(researchCompanyBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// Collect every row's primary key values for one batched DELETE.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), researchCompanyPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"research_companies\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, researchCompanyPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from researchCompany slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for research_companies")
	}

	if len(researchCompanyAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *ResearchCompany) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindResearchCompany(ctx, exec, o.CoStemID)
	if err != nil {
		return err
	}

	// Overwrite the receiver wholesale with the freshly fetched row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *ResearchCompanySlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	// Collect every row's primary key values for one batched SELECT.
	slice := ResearchCompanySlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), researchCompanyPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"research_companies\".* FROM \"research_companies\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, researchCompanyPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in ResearchCompanySlice")
	}

	// Replace the caller's slice; database order may differ from the original.
	*o = slice

	return nil
}

// ResearchCompanyExists checks if the ResearchCompany row exists.
func ResearchCompanyExists(ctx context.Context, exec boil.ContextExecutor, coStemID int64) (bool, error) {
	var exists bool
	// EXISTS + LIMIT 1 lets the database stop scanning at the first match.
	sql := "select exists(select 1 from \"research_companies\" where \"co_stem_id\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, coStemID)
	}
	row := exec.QueryRowContext(ctx, sql, coStemID)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if research_companies exists")
	}

	return exists, nil
}
diff --git a/models/research_stem.go b/models/research_stem.go
new file mode 100644
index 0000000..af52f5d
--- /dev/null
+++ b/models/research_stem.go
@@ -0,0 +1,1389 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// ResearchStem is an object representing the database table. +type ResearchStem struct { + ResStemID int64 `boil:"res_stem_id" json:"res_stem_id" toml:"res_stem_id" yaml:"res_stem_id"` + ResearchStem null.String `boil:"research_stem" json:"research_stem,omitempty" toml:"research_stem" yaml:"research_stem,omitempty"` + + R *researchStemR `boil:"-" json:"-" toml:"-" yaml:"-"` + L researchStemL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ResearchStemColumns = struct { + ResStemID string + ResearchStem string +}{ + ResStemID: "res_stem_id", + ResearchStem: "research_stem", +} + +var ResearchStemTableColumns = struct { + ResStemID string + ResearchStem string +}{ + ResStemID: "research_stem.res_stem_id", + ResearchStem: "research_stem.research_stem", +} + +// Generated where + +var ResearchStemWhere = struct { + ResStemID whereHelperint64 + ResearchStem whereHelpernull_String +}{ + ResStemID: whereHelperint64{field: "\"research_stem\".\"res_stem_id\""}, + ResearchStem: whereHelpernull_String{field: "\"research_stem\".\"research_stem\""}, +} + +// ResearchStemRels is where relationship names are stored. +var ResearchStemRels = struct { + ResStemMoleculeSynonyms string + ResStemResearchCompanies string +}{ + ResStemMoleculeSynonyms: "ResStemMoleculeSynonyms", + ResStemResearchCompanies: "ResStemResearchCompanies", +} + +// researchStemR is where relationships are stored. 
+type researchStemR struct { + ResStemMoleculeSynonyms MoleculeSynonymSlice `boil:"ResStemMoleculeSynonyms" json:"ResStemMoleculeSynonyms" toml:"ResStemMoleculeSynonyms" yaml:"ResStemMoleculeSynonyms"` + ResStemResearchCompanies ResearchCompanySlice `boil:"ResStemResearchCompanies" json:"ResStemResearchCompanies" toml:"ResStemResearchCompanies" yaml:"ResStemResearchCompanies"` +} + +// NewStruct creates a new relationship struct +func (*researchStemR) NewStruct() *researchStemR { + return &researchStemR{} +} + +func (r *researchStemR) GetResStemMoleculeSynonyms() MoleculeSynonymSlice { + if r == nil { + return nil + } + return r.ResStemMoleculeSynonyms +} + +func (r *researchStemR) GetResStemResearchCompanies() ResearchCompanySlice { + if r == nil { + return nil + } + return r.ResStemResearchCompanies +} + +// researchStemL is where Load methods for each relationship are stored. +type researchStemL struct{} + +var ( + researchStemAllColumns = []string{"res_stem_id", "research_stem"} + researchStemColumnsWithoutDefault = []string{"res_stem_id"} + researchStemColumnsWithDefault = []string{"research_stem"} + researchStemPrimaryKeyColumns = []string{"res_stem_id"} + researchStemGeneratedColumns = []string{} +) + +type ( + // ResearchStemSlice is an alias for a slice of pointers to ResearchStem. + // This should almost always be used instead of []ResearchStem. 
+ ResearchStemSlice []*ResearchStem + // ResearchStemHook is the signature for custom ResearchStem hook methods + ResearchStemHook func(context.Context, boil.ContextExecutor, *ResearchStem) error + + researchStemQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + researchStemType = reflect.TypeOf(&ResearchStem{}) + researchStemMapping = queries.MakeStructMapping(researchStemType) + researchStemPrimaryKeyMapping, _ = queries.BindMapping(researchStemType, researchStemMapping, researchStemPrimaryKeyColumns) + researchStemInsertCacheMut sync.RWMutex + researchStemInsertCache = make(map[string]insertCache) + researchStemUpdateCacheMut sync.RWMutex + researchStemUpdateCache = make(map[string]updateCache) + researchStemUpsertCacheMut sync.RWMutex + researchStemUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var researchStemAfterSelectHooks []ResearchStemHook + +var researchStemBeforeInsertHooks []ResearchStemHook +var researchStemAfterInsertHooks []ResearchStemHook + +var researchStemBeforeUpdateHooks []ResearchStemHook +var researchStemAfterUpdateHooks []ResearchStemHook + +var researchStemBeforeDeleteHooks []ResearchStemHook +var researchStemAfterDeleteHooks []ResearchStemHook + +var researchStemBeforeUpsertHooks []ResearchStemHook +var researchStemAfterUpsertHooks []ResearchStemHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *ResearchStem) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range researchStemAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. 
+func (o *ResearchStem) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range researchStemBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *ResearchStem) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range researchStemAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *ResearchStem) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range researchStemBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *ResearchStem) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range researchStemAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *ResearchStem) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range researchStemBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. 
+func (o *ResearchStem) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range researchStemAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *ResearchStem) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range researchStemBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *ResearchStem) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range researchStemAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddResearchStemHook registers your hook function for all future operations. 
+func AddResearchStemHook(hookPoint boil.HookPoint, researchStemHook ResearchStemHook) { + switch hookPoint { + case boil.AfterSelectHook: + researchStemAfterSelectHooks = append(researchStemAfterSelectHooks, researchStemHook) + case boil.BeforeInsertHook: + researchStemBeforeInsertHooks = append(researchStemBeforeInsertHooks, researchStemHook) + case boil.AfterInsertHook: + researchStemAfterInsertHooks = append(researchStemAfterInsertHooks, researchStemHook) + case boil.BeforeUpdateHook: + researchStemBeforeUpdateHooks = append(researchStemBeforeUpdateHooks, researchStemHook) + case boil.AfterUpdateHook: + researchStemAfterUpdateHooks = append(researchStemAfterUpdateHooks, researchStemHook) + case boil.BeforeDeleteHook: + researchStemBeforeDeleteHooks = append(researchStemBeforeDeleteHooks, researchStemHook) + case boil.AfterDeleteHook: + researchStemAfterDeleteHooks = append(researchStemAfterDeleteHooks, researchStemHook) + case boil.BeforeUpsertHook: + researchStemBeforeUpsertHooks = append(researchStemBeforeUpsertHooks, researchStemHook) + case boil.AfterUpsertHook: + researchStemAfterUpsertHooks = append(researchStemAfterUpsertHooks, researchStemHook) + } +} + +// One returns a single researchStem record from the query. +func (q researchStemQuery) One(ctx context.Context, exec boil.ContextExecutor) (*ResearchStem, error) { + o := &ResearchStem{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for research_stem") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all ResearchStem records from the query. 
+func (q researchStemQuery) All(ctx context.Context, exec boil.ContextExecutor) (ResearchStemSlice, error) { + var o []*ResearchStem + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to ResearchStem slice") + } + + if len(researchStemAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all ResearchStem records in the query. +func (q researchStemQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count research_stem rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q researchStemQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if research_stem exists") + } + + return count > 0, nil +} + +// ResStemMoleculeSynonyms retrieves all the molecule_synonym's MoleculeSynonyms with an executor via res_stem_id column. +func (o *ResearchStem) ResStemMoleculeSynonyms(mods ...qm.QueryMod) moleculeSynonymQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"molecule_synonyms\".\"res_stem_id\"=?", o.ResStemID), + ) + + return MoleculeSynonyms(queryMods...) +} + +// ResStemResearchCompanies retrieves all the research_company's ResearchCompanies with an executor via res_stem_id column. 
+func (o *ResearchStem) ResStemResearchCompanies(mods ...qm.QueryMod) researchCompanyQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"research_companies\".\"res_stem_id\"=?", o.ResStemID), + ) + + return ResearchCompanies(queryMods...) +} + +// LoadResStemMoleculeSynonyms allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (researchStemL) LoadResStemMoleculeSynonyms(ctx context.Context, e boil.ContextExecutor, singular bool, maybeResearchStem interface{}, mods queries.Applicator) error { + var slice []*ResearchStem + var object *ResearchStem + + if singular { + object = maybeResearchStem.(*ResearchStem) + } else { + slice = *maybeResearchStem.(*[]*ResearchStem) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &researchStemR{} + } + args = append(args, object.ResStemID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &researchStemR{} + } + + for _, a := range args { + if queries.Equal(a, obj.ResStemID) { + continue Outer + } + } + + args = append(args, obj.ResStemID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`molecule_synonyms`), + qm.WhereIn(`molecule_synonyms.res_stem_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load molecule_synonyms") + } + + var resultSlice []*MoleculeSynonym + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice molecule_synonyms") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on molecule_synonyms") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during 
iteration of eager loaded relations for molecule_synonyms") + } + + if len(moleculeSynonymAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.ResStemMoleculeSynonyms = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &moleculeSynonymR{} + } + foreign.R.ResStem = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.ResStemID, foreign.ResStemID) { + local.R.ResStemMoleculeSynonyms = append(local.R.ResStemMoleculeSynonyms, foreign) + if foreign.R == nil { + foreign.R = &moleculeSynonymR{} + } + foreign.R.ResStem = local + break + } + } + } + + return nil +} + +// LoadResStemResearchCompanies allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (researchStemL) LoadResStemResearchCompanies(ctx context.Context, e boil.ContextExecutor, singular bool, maybeResearchStem interface{}, mods queries.Applicator) error { + var slice []*ResearchStem + var object *ResearchStem + + if singular { + object = maybeResearchStem.(*ResearchStem) + } else { + slice = *maybeResearchStem.(*[]*ResearchStem) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &researchStemR{} + } + args = append(args, object.ResStemID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &researchStemR{} + } + + for _, a := range args { + if queries.Equal(a, obj.ResStemID) { + continue Outer + } + } + + args = append(args, obj.ResStemID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`research_companies`), + qm.WhereIn(`research_companies.res_stem_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, 
"failed to eager load research_companies") + } + + var resultSlice []*ResearchCompany + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice research_companies") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on research_companies") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for research_companies") + } + + if len(researchCompanyAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.ResStemResearchCompanies = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &researchCompanyR{} + } + foreign.R.ResStem = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.ResStemID, foreign.ResStemID) { + local.R.ResStemResearchCompanies = append(local.R.ResStemResearchCompanies, foreign) + if foreign.R == nil { + foreign.R = &researchCompanyR{} + } + foreign.R.ResStem = local + break + } + } + } + + return nil +} + +// AddResStemMoleculeSynonyms adds the given related objects to the existing relationships +// of the research_stem, optionally inserting them as new records. +// Appends related to o.R.ResStemMoleculeSynonyms. +// Sets related.R.ResStem appropriately. 
+func (o *ResearchStem) AddResStemMoleculeSynonyms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeSynonym) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.ResStemID, o.ResStemID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"molecule_synonyms\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"res_stem_id"}), + strmangle.WhereClause("\"", "\"", 0, moleculeSynonymPrimaryKeyColumns), + ) + values := []interface{}{o.ResStemID, rel.MolsynID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.ResStemID, o.ResStemID) + } + } + + if o.R == nil { + o.R = &researchStemR{ + ResStemMoleculeSynonyms: related, + } + } else { + o.R.ResStemMoleculeSynonyms = append(o.R.ResStemMoleculeSynonyms, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &moleculeSynonymR{ + ResStem: o, + } + } else { + rel.R.ResStem = o + } + } + return nil +} + +// SetResStemMoleculeSynonyms removes all previously related items of the +// research_stem replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.ResStem's ResStemMoleculeSynonyms accordingly. +// Replaces o.R.ResStemMoleculeSynonyms with related. +// Sets related.R.ResStem's ResStemMoleculeSynonyms accordingly. +func (o *ResearchStem) SetResStemMoleculeSynonyms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*MoleculeSynonym) error { + query := "update \"molecule_synonyms\" set \"res_stem_id\" = null where \"res_stem_id\" = ?" 
+ values := []interface{}{o.ResStemID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.ResStemMoleculeSynonyms { + queries.SetScanner(&rel.ResStemID, nil) + if rel.R == nil { + continue + } + + rel.R.ResStem = nil + } + o.R.ResStemMoleculeSynonyms = nil + } + + return o.AddResStemMoleculeSynonyms(ctx, exec, insert, related...) +} + +// RemoveResStemMoleculeSynonyms relationships from objects passed in. +// Removes related items from R.ResStemMoleculeSynonyms (uses pointer comparison, removal does not keep order) +// Sets related.R.ResStem. +func (o *ResearchStem) RemoveResStemMoleculeSynonyms(ctx context.Context, exec boil.ContextExecutor, related ...*MoleculeSynonym) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.ResStemID, nil) + if rel.R != nil { + rel.R.ResStem = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("res_stem_id")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.ResStemMoleculeSynonyms { + if rel != ri { + continue + } + + ln := len(o.R.ResStemMoleculeSynonyms) + if ln > 1 && i < ln-1 { + o.R.ResStemMoleculeSynonyms[i] = o.R.ResStemMoleculeSynonyms[ln-1] + } + o.R.ResStemMoleculeSynonyms = o.R.ResStemMoleculeSynonyms[:ln-1] + break + } + } + + return nil +} + +// AddResStemResearchCompanies adds the given related objects to the existing relationships +// of the research_stem, optionally inserting them as new records. +// Appends related to o.R.ResStemResearchCompanies. +// Sets related.R.ResStem appropriately. 
+func (o *ResearchStem) AddResStemResearchCompanies(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ResearchCompany) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.ResStemID, o.ResStemID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"research_companies\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"res_stem_id"}), + strmangle.WhereClause("\"", "\"", 0, researchCompanyPrimaryKeyColumns), + ) + values := []interface{}{o.ResStemID, rel.CoStemID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.ResStemID, o.ResStemID) + } + } + + if o.R == nil { + o.R = &researchStemR{ + ResStemResearchCompanies: related, + } + } else { + o.R.ResStemResearchCompanies = append(o.R.ResStemResearchCompanies, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &researchCompanyR{ + ResStem: o, + } + } else { + rel.R.ResStem = o + } + } + return nil +} + +// SetResStemResearchCompanies removes all previously related items of the +// research_stem replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.ResStem's ResStemResearchCompanies accordingly. +// Replaces o.R.ResStemResearchCompanies with related. +// Sets related.R.ResStem's ResStemResearchCompanies accordingly. +func (o *ResearchStem) SetResStemResearchCompanies(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*ResearchCompany) error { + query := "update \"research_companies\" set \"res_stem_id\" = null where \"res_stem_id\" = ?" 
+ values := []interface{}{o.ResStemID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.ResStemResearchCompanies { + queries.SetScanner(&rel.ResStemID, nil) + if rel.R == nil { + continue + } + + rel.R.ResStem = nil + } + o.R.ResStemResearchCompanies = nil + } + + return o.AddResStemResearchCompanies(ctx, exec, insert, related...) +} + +// RemoveResStemResearchCompanies relationships from objects passed in. +// Removes related items from R.ResStemResearchCompanies (uses pointer comparison, removal does not keep order) +// Sets related.R.ResStem. +func (o *ResearchStem) RemoveResStemResearchCompanies(ctx context.Context, exec boil.ContextExecutor, related ...*ResearchCompany) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.ResStemID, nil) + if rel.R != nil { + rel.R.ResStem = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("res_stem_id")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.ResStemResearchCompanies { + if rel != ri { + continue + } + + ln := len(o.R.ResStemResearchCompanies) + if ln > 1 && i < ln-1 { + o.R.ResStemResearchCompanies[i] = o.R.ResStemResearchCompanies[ln-1] + } + o.R.ResStemResearchCompanies = o.R.ResStemResearchCompanies[:ln-1] + break + } + } + + return nil +} + +// ResearchStems retrieves all the records using an executor. +func ResearchStems(mods ...qm.QueryMod) researchStemQuery { + mods = append(mods, qm.From("\"research_stem\"")) + q := NewQuery(mods...) 
+ if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"research_stem\".*"}) + } + + return researchStemQuery{q} +} + +// FindResearchStem retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindResearchStem(ctx context.Context, exec boil.ContextExecutor, resStemID int64, selectCols ...string) (*ResearchStem, error) { + researchStemObj := &ResearchStem{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"research_stem\" where \"res_stem_id\"=?", sel, + ) + + q := queries.Raw(query, resStemID) + + err := q.Bind(ctx, exec, researchStemObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from research_stem") + } + + if err = researchStemObj.doAfterSelectHooks(ctx, exec); err != nil { + return researchStemObj, err + } + + return researchStemObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *ResearchStem) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no research_stem provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(researchStemColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + researchStemInsertCacheMut.RLock() + cache, cached := researchStemInsertCache[key] + researchStemInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + researchStemAllColumns, + researchStemColumnsWithDefault, + researchStemColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(researchStemType, researchStemMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(researchStemType, researchStemMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"research_stem\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"research_stem\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into research_stem") + } + + if !cached { + researchStemInsertCacheMut.Lock() + researchStemInsertCache[key] = cache + researchStemInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the ResearchStem. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *ResearchStem) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + researchStemUpdateCacheMut.RLock() + cache, cached := researchStemUpdateCache[key] + researchStemUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + researchStemAllColumns, + researchStemPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update research_stem, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"research_stem\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, researchStemPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(researchStemType, researchStemMapping, append(wl, researchStemPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update research_stem row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for research_stem") + } + + if !cached { + researchStemUpdateCacheMut.Lock() + researchStemUpdateCache[key] = cache + researchStemUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q researchStemQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for research_stem") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for research_stem") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ResearchStemSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), researchStemPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"research_stem\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, researchStemPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in researchStem slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all researchStem") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *ResearchStem) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no research_stem provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(researchStemColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + 
researchStemUpsertCacheMut.RLock() + cache, cached := researchStemUpsertCache[key] + researchStemUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + researchStemAllColumns, + researchStemColumnsWithDefault, + researchStemColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + researchStemAllColumns, + researchStemPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert research_stem, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(researchStemPrimaryKeyColumns)) + copy(conflict, researchStemPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"research_stem\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(researchStemType, researchStemMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(researchStemType, researchStemMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "models: unable to upsert research_stem") + } + + if !cached { + researchStemUpsertCacheMut.Lock() + researchStemUpsertCache[key] = cache + researchStemUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single ResearchStem record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *ResearchStem) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no ResearchStem provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), researchStemPrimaryKeyMapping) + sql := "DELETE FROM \"research_stem\" WHERE \"res_stem_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from research_stem") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for research_stem") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q researchStemQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no researchStemQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from research_stem") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for research_stem") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o ResearchStemSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(researchStemBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), researchStemPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"research_stem\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, researchStemPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from researchStem slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for research_stem") + } + + if len(researchStemAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *ResearchStem) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindResearchStem(ctx, exec, o.ResStemID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *ResearchStemSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ResearchStemSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), researchStemPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"research_stem\".* FROM \"research_stem\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, researchStemPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in ResearchStemSlice") + } + + *o = slice + + return nil +} + +// ResearchStemExists checks if the ResearchStem row exists. +func ResearchStemExists(ctx context.Context, exec boil.ContextExecutor, resStemID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"research_stem\" where \"res_stem_id\"=? 
limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, resStemID) + } + row := exec.QueryRowContext(ctx, sql, resStemID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if research_stem exists") + } + + return exists, nil +} diff --git a/models/seqhash.go b/models/seqhash.go new file mode 100644 index 0000000..f32bfec --- /dev/null +++ b/models/seqhash.go @@ -0,0 +1,2012 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Seqhash is an object representing the database table. 
+type Seqhash struct { + Seqhash string `boil:"seqhash" json:"seqhash" toml:"seqhash" yaml:"seqhash"` + Sequence string `boil:"sequence" json:"sequence" toml:"sequence" yaml:"sequence"` + Circular string `boil:"circular" json:"circular" toml:"circular" yaml:"circular"` + Doublestranded string `boil:"doublestranded" json:"doublestranded" toml:"doublestranded" yaml:"doublestranded"` + Seqhashtype string `boil:"seqhashtype" json:"seqhashtype" toml:"seqhashtype" yaml:"seqhashtype"` + Translation null.String `boil:"translation" json:"translation,omitempty" toml:"translation" yaml:"translation,omitempty"` + + R *seqhashR `boil:"-" json:"-" toml:"-" yaml:"-"` + L seqhashL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var SeqhashColumns = struct { + Seqhash string + Sequence string + Circular string + Doublestranded string + Seqhashtype string + Translation string +}{ + Seqhash: "seqhash", + Sequence: "sequence", + Circular: "circular", + Doublestranded: "doublestranded", + Seqhashtype: "seqhashtype", + Translation: "translation", +} + +var SeqhashTableColumns = struct { + Seqhash string + Sequence string + Circular string + Doublestranded string + Seqhashtype string + Translation string +}{ + Seqhash: "seqhash.seqhash", + Sequence: "seqhash.sequence", + Circular: "seqhash.circular", + Doublestranded: "seqhash.doublestranded", + Seqhashtype: "seqhash.seqhashtype", + Translation: "seqhash.translation", +} + +// Generated where + +var SeqhashWhere = struct { + Seqhash whereHelperstring + Sequence whereHelperstring + Circular whereHelperstring + Doublestranded whereHelperstring + Seqhashtype whereHelperstring + Translation whereHelpernull_String +}{ + Seqhash: whereHelperstring{field: "\"seqhash\".\"seqhash\""}, + Sequence: whereHelperstring{field: "\"seqhash\".\"sequence\""}, + Circular: whereHelperstring{field: "\"seqhash\".\"circular\""}, + Doublestranded: whereHelperstring{field: "\"seqhash\".\"doublestranded\""}, + Seqhashtype: whereHelperstring{field: 
"\"seqhash\".\"seqhashtype\""}, + Translation: whereHelpernull_String{field: "\"seqhash\".\"translation\""}, +} + +// SeqhashRels is where relationship names are stored. +var SeqhashRels = struct { + TranslationSeqhash string + Genbanks string + ParentGenbanks string + TranslationSeqhashes string + Uniprots string +}{ + TranslationSeqhash: "TranslationSeqhash", + Genbanks: "Genbanks", + ParentGenbanks: "ParentGenbanks", + TranslationSeqhashes: "TranslationSeqhashes", + Uniprots: "Uniprots", +} + +// seqhashR is where relationships are stored. +type seqhashR struct { + TranslationSeqhash *Seqhash `boil:"TranslationSeqhash" json:"TranslationSeqhash" toml:"TranslationSeqhash" yaml:"TranslationSeqhash"` + Genbanks GenbankSlice `boil:"Genbanks" json:"Genbanks" toml:"Genbanks" yaml:"Genbanks"` + ParentGenbanks GenbankSlice `boil:"ParentGenbanks" json:"ParentGenbanks" toml:"ParentGenbanks" yaml:"ParentGenbanks"` + TranslationSeqhashes SeqhashSlice `boil:"TranslationSeqhashes" json:"TranslationSeqhashes" toml:"TranslationSeqhashes" yaml:"TranslationSeqhashes"` + Uniprots UniprotSlice `boil:"Uniprots" json:"Uniprots" toml:"Uniprots" yaml:"Uniprots"` +} + +// NewStruct creates a new relationship struct +func (*seqhashR) NewStruct() *seqhashR { + return &seqhashR{} +} + +func (r *seqhashR) GetTranslationSeqhash() *Seqhash { + if r == nil { + return nil + } + return r.TranslationSeqhash +} + +func (r *seqhashR) GetGenbanks() GenbankSlice { + if r == nil { + return nil + } + return r.Genbanks +} + +func (r *seqhashR) GetParentGenbanks() GenbankSlice { + if r == nil { + return nil + } + return r.ParentGenbanks +} + +func (r *seqhashR) GetTranslationSeqhashes() SeqhashSlice { + if r == nil { + return nil + } + return r.TranslationSeqhashes +} + +func (r *seqhashR) GetUniprots() UniprotSlice { + if r == nil { + return nil + } + return r.Uniprots +} + +// seqhashL is where Load methods for each relationship are stored. 
+type seqhashL struct{} + +var ( + seqhashAllColumns = []string{"seqhash", "sequence", "circular", "doublestranded", "seqhashtype", "translation"} + seqhashColumnsWithoutDefault = []string{"seqhash", "sequence", "seqhashtype"} + seqhashColumnsWithDefault = []string{"circular", "doublestranded", "translation"} + seqhashPrimaryKeyColumns = []string{"seqhash"} + seqhashGeneratedColumns = []string{} +) + +type ( + // SeqhashSlice is an alias for a slice of pointers to Seqhash. + // This should almost always be used instead of []Seqhash. + SeqhashSlice []*Seqhash + // SeqhashHook is the signature for custom Seqhash hook methods + SeqhashHook func(context.Context, boil.ContextExecutor, *Seqhash) error + + seqhashQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + seqhashType = reflect.TypeOf(&Seqhash{}) + seqhashMapping = queries.MakeStructMapping(seqhashType) + seqhashPrimaryKeyMapping, _ = queries.BindMapping(seqhashType, seqhashMapping, seqhashPrimaryKeyColumns) + seqhashInsertCacheMut sync.RWMutex + seqhashInsertCache = make(map[string]insertCache) + seqhashUpdateCacheMut sync.RWMutex + seqhashUpdateCache = make(map[string]updateCache) + seqhashUpsertCacheMut sync.RWMutex + seqhashUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var seqhashAfterSelectHooks []SeqhashHook + +var seqhashBeforeInsertHooks []SeqhashHook +var seqhashAfterInsertHooks []SeqhashHook + +var seqhashBeforeUpdateHooks []SeqhashHook +var seqhashAfterUpdateHooks []SeqhashHook + +var seqhashBeforeDeleteHooks []SeqhashHook +var seqhashAfterDeleteHooks []SeqhashHook + +var seqhashBeforeUpsertHooks []SeqhashHook +var seqhashAfterUpsertHooks []SeqhashHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *Seqhash) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range seqhashAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *Seqhash) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range seqhashBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Seqhash) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range seqhashAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *Seqhash) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range seqhashBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Seqhash) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range seqhashAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *Seqhash) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range seqhashBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *Seqhash) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range seqhashAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Seqhash) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range seqhashBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *Seqhash) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range seqhashAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddSeqhashHook registers your hook function for all future operations. 
+func AddSeqhashHook(hookPoint boil.HookPoint, seqhashHook SeqhashHook) { + switch hookPoint { + case boil.AfterSelectHook: + seqhashAfterSelectHooks = append(seqhashAfterSelectHooks, seqhashHook) + case boil.BeforeInsertHook: + seqhashBeforeInsertHooks = append(seqhashBeforeInsertHooks, seqhashHook) + case boil.AfterInsertHook: + seqhashAfterInsertHooks = append(seqhashAfterInsertHooks, seqhashHook) + case boil.BeforeUpdateHook: + seqhashBeforeUpdateHooks = append(seqhashBeforeUpdateHooks, seqhashHook) + case boil.AfterUpdateHook: + seqhashAfterUpdateHooks = append(seqhashAfterUpdateHooks, seqhashHook) + case boil.BeforeDeleteHook: + seqhashBeforeDeleteHooks = append(seqhashBeforeDeleteHooks, seqhashHook) + case boil.AfterDeleteHook: + seqhashAfterDeleteHooks = append(seqhashAfterDeleteHooks, seqhashHook) + case boil.BeforeUpsertHook: + seqhashBeforeUpsertHooks = append(seqhashBeforeUpsertHooks, seqhashHook) + case boil.AfterUpsertHook: + seqhashAfterUpsertHooks = append(seqhashAfterUpsertHooks, seqhashHook) + } +} + +// One returns a single seqhash record from the query. +func (q seqhashQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Seqhash, error) { + o := &Seqhash{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for seqhash") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Seqhash records from the query. 
+func (q seqhashQuery) All(ctx context.Context, exec boil.ContextExecutor) (SeqhashSlice, error) { + var o []*Seqhash + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to Seqhash slice") + } + + if len(seqhashAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Seqhash records in the query. +func (q seqhashQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count seqhash rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q seqhashQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if seqhash exists") + } + + return count > 0, nil +} + +// TranslationSeqhash pointed to by the foreign key. +func (o *Seqhash) TranslationSeqhash(mods ...qm.QueryMod) seqhashQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"seqhash\" = ?", o.Translation), + } + + queryMods = append(queryMods, mods...) + + return Seqhashes(queryMods...) +} + +// Genbanks retrieves all the genbank's Genbanks with an executor. +func (o *Seqhash) Genbanks(mods ...qm.QueryMod) genbankQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"genbank\".\"seqhash\"=?", o.Seqhash), + ) + + return Genbanks(queryMods...) 
+} + +// ParentGenbanks retrieves all the genbank's Genbanks with an executor via accession column. +func (o *Seqhash) ParentGenbanks(mods ...qm.QueryMod) genbankQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.InnerJoin("\"genbank_features\" on \"genbank\".\"accession\" = \"genbank_features\".\"parent\""), + qm.Where("\"genbank_features\".\"seqhash\"=?", o.Seqhash), + ) + + return Genbanks(queryMods...) +} + +// TranslationSeqhashes retrieves all the seqhash's Seqhashes with an executor via translation column. +func (o *Seqhash) TranslationSeqhashes(mods ...qm.QueryMod) seqhashQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"seqhash\".\"translation\"=?", o.Seqhash), + ) + + return Seqhashes(queryMods...) +} + +// Uniprots retrieves all the uniprot's Uniprots with an executor. +func (o *Seqhash) Uniprots(mods ...qm.QueryMod) uniprotQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"uniprot\".\"seqhash\"=?", o.Seqhash), + ) + + return Uniprots(queryMods...) +} + +// LoadTranslationSeqhash allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (seqhashL) LoadTranslationSeqhash(ctx context.Context, e boil.ContextExecutor, singular bool, maybeSeqhash interface{}, mods queries.Applicator) error { + var slice []*Seqhash + var object *Seqhash + + if singular { + object = maybeSeqhash.(*Seqhash) + } else { + slice = *maybeSeqhash.(*[]*Seqhash) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &seqhashR{} + } + if !queries.IsNil(object.Translation) { + args = append(args, object.Translation) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &seqhashR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Translation) { + continue Outer + } + } + + if !queries.IsNil(obj.Translation) { + args = append(args, obj.Translation) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`seqhash`), + qm.WhereIn(`seqhash.seqhash in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Seqhash") + } + + var resultSlice []*Seqhash + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Seqhash") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for seqhash") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for seqhash") + } + + if len(seqhashAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.TranslationSeqhash = foreign + if foreign.R == nil { + foreign.R = &seqhashR{} + } + foreign.R.TranslationSeqhashes = append(foreign.R.TranslationSeqhashes, object) + return nil + } + + for _, local 
:= range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.Translation, foreign.Seqhash) { + local.R.TranslationSeqhash = foreign + if foreign.R == nil { + foreign.R = &seqhashR{} + } + foreign.R.TranslationSeqhashes = append(foreign.R.TranslationSeqhashes, local) + break + } + } + } + + return nil +} + +// LoadGenbanks allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (seqhashL) LoadGenbanks(ctx context.Context, e boil.ContextExecutor, singular bool, maybeSeqhash interface{}, mods queries.Applicator) error { + var slice []*Seqhash + var object *Seqhash + + if singular { + object = maybeSeqhash.(*Seqhash) + } else { + slice = *maybeSeqhash.(*[]*Seqhash) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &seqhashR{} + } + args = append(args, object.Seqhash) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &seqhashR{} + } + + for _, a := range args { + if a == obj.Seqhash { + continue Outer + } + } + + args = append(args, obj.Seqhash) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`genbank`), + qm.WhereIn(`genbank.seqhash in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load genbank") + } + + var resultSlice []*Genbank + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice genbank") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on genbank") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for genbank") + } + + if len(genbankAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err 
!= nil { + return err + } + } + } + if singular { + object.R.Genbanks = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &genbankR{} + } + foreign.R.GenbankSeqhash = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.Seqhash == foreign.Seqhash { + local.R.Genbanks = append(local.R.Genbanks, foreign) + if foreign.R == nil { + foreign.R = &genbankR{} + } + foreign.R.GenbankSeqhash = local + break + } + } + } + + return nil +} + +// LoadParentGenbanks allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (seqhashL) LoadParentGenbanks(ctx context.Context, e boil.ContextExecutor, singular bool, maybeSeqhash interface{}, mods queries.Applicator) error { + var slice []*Seqhash + var object *Seqhash + + if singular { + object = maybeSeqhash.(*Seqhash) + } else { + slice = *maybeSeqhash.(*[]*Seqhash) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &seqhashR{} + } + args = append(args, object.Seqhash) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &seqhashR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Seqhash) { + continue Outer + } + } + + args = append(args, obj.Seqhash) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.Select("\"genbank\".\"accession\", \"genbank\".\"seqhash\", \"a\".\"seqhash\""), + qm.From("\"genbank\""), + qm.InnerJoin("\"genbank_features\" as \"a\" on \"genbank\".\"accession\" = \"a\".\"parent\""), + qm.WhereIn("\"a\".\"seqhash\" in ?", args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load genbank") + } + + var resultSlice []*Genbank + + var localJoinCols []string + for results.Next() { + one := new(Genbank) + var localJoinCol string + + err 
= results.Scan(&one.Accession, &one.Seqhash, &localJoinCol) + if err != nil { + return errors.Wrap(err, "failed to scan eager loaded results for genbank") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "failed to plebian-bind eager loaded slice genbank") + } + + resultSlice = append(resultSlice, one) + localJoinCols = append(localJoinCols, localJoinCol) + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on genbank") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for genbank") + } + + if len(genbankAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.ParentGenbanks = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &genbankR{} + } + foreign.R.Seqhashes = append(foreign.R.Seqhashes, object) + } + return nil + } + + for i, foreign := range resultSlice { + localJoinCol := localJoinCols[i] + for _, local := range slice { + if queries.Equal(local.Seqhash, localJoinCol) { + local.R.ParentGenbanks = append(local.R.ParentGenbanks, foreign) + if foreign.R == nil { + foreign.R = &genbankR{} + } + foreign.R.Seqhashes = append(foreign.R.Seqhashes, local) + break + } + } + } + + return nil +} + +// LoadTranslationSeqhashes allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (seqhashL) LoadTranslationSeqhashes(ctx context.Context, e boil.ContextExecutor, singular bool, maybeSeqhash interface{}, mods queries.Applicator) error { + var slice []*Seqhash + var object *Seqhash + + if singular { + object = maybeSeqhash.(*Seqhash) + } else { + slice = *maybeSeqhash.(*[]*Seqhash) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &seqhashR{} + } + args = append(args, object.Seqhash) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &seqhashR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Seqhash) { + continue Outer + } + } + + args = append(args, obj.Seqhash) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`seqhash`), + qm.WhereIn(`seqhash.translation in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load seqhash") + } + + var resultSlice []*Seqhash + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice seqhash") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on seqhash") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for seqhash") + } + + if len(seqhashAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.TranslationSeqhashes = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &seqhashR{} + } + foreign.R.TranslationSeqhash = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.Seqhash, foreign.Translation) { + local.R.TranslationSeqhashes = 
				append(local.R.TranslationSeqhashes, foreign)
				if foreign.R == nil {
					foreign.R = &seqhashR{}
				}
				foreign.R.TranslationSeqhash = local
				break
			}
		}
	}

	return nil
}

// LoadUniprots allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
// Results are attached to object.R.Uniprots (singular) or to each matching
// slice element's R.Uniprots (plural), and each loaded Uniprot gets its
// back-reference R.UniprotSeqhash set.
func (seqhashL) LoadUniprots(ctx context.Context, e boil.ContextExecutor, singular bool, maybeSeqhash interface{}, mods queries.Applicator) error {
	var slice []*Seqhash
	var object *Seqhash

	if singular {
		object = maybeSeqhash.(*Seqhash)
	} else {
		slice = *maybeSeqhash.(*[]*Seqhash)
	}

	// Collect the distinct Seqhash keys to query by; duplicates are skipped
	// so the IN clause stays minimal.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &seqhashR{}
		}
		args = append(args, object.Seqhash)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &seqhashR{}
			}

			for _, a := range args {
				if a == obj.Seqhash {
					continue Outer
				}
			}

			args = append(args, obj.Seqhash)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`uniprot`),
		qm.WhereIn(`uniprot.seqhash in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load uniprot")
	}

	var resultSlice []*Uniprot
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice uniprot")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on uniprot")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for uniprot")
	}

	if len(uniprotAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.Uniprots = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &uniprotR{}
			}
			foreign.R.UniprotSeqhash = object
		}
		return nil
	}

	// Plural path: match each loaded row back to its parent by key.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.Seqhash == foreign.Seqhash {
				local.R.Uniprots = append(local.R.Uniprots, foreign)
				if foreign.R == nil {
					foreign.R = &uniprotR{}
				}
				foreign.R.UniprotSeqhash = local
				break
			}
		}
	}

	return nil
}

// SetTranslationSeqhash of the seqhash to the related item.
// Sets o.R.TranslationSeqhash to related.
// Adds o to related.R.TranslationSeqhashes.
// If insert is true the related row is inserted first; otherwise only the
// local row's "translation" column is updated to point at related.
func (o *Seqhash) SetTranslationSeqhash(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Seqhash) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Index 0 here selects unindexed "?" placeholders (this file's queries
	// use "?" throughout — see FindSeqhash / buildUpsertQuerySQLite).
	updateQuery := fmt.Sprintf(
		"UPDATE \"seqhash\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"translation"}),
		strmangle.WhereClause("\"", "\"", 0, seqhashPrimaryKeyColumns),
	)
	values := []interface{}{related.Seqhash, o.Seqhash}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the DB change into the in-memory relationship structs.
	queries.Assign(&o.Translation, related.Seqhash)
	if o.R == nil {
		o.R = &seqhashR{
			TranslationSeqhash: related,
		}
	} else {
		o.R.TranslationSeqhash = related
	}

	if related.R == nil {
		related.R = &seqhashR{
			TranslationSeqhashes: SeqhashSlice{o},
		}
	} else {
		related.R.TranslationSeqhashes = append(related.R.TranslationSeqhashes, o)
	}

	return nil
}

// RemoveTranslationSeqhash relationship.
// Sets o.R.TranslationSeqhash to nil.
// Removes o from all passed in related items' relationships struct.
// RemoveTranslationSeqhash nulls out o's "translation" column in the DB and
// detaches the in-memory relationship on both sides.
func (o *Seqhash) RemoveTranslationSeqhash(ctx context.Context, exec boil.ContextExecutor, related *Seqhash) error {
	var err error

	queries.SetScanner(&o.Translation, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("translation")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.TranslationSeqhash = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// Swap-with-last removal; does not preserve slice order.
	// NOTE(review): entries are matched via queries.Equal on the (now nulled)
	// Translation value rather than by pointer identity — generated behavior,
	// left as-is.
	for i, ri := range related.R.TranslationSeqhashes {
		if queries.Equal(o.Translation, ri.Translation) {
			continue
		}

		ln := len(related.R.TranslationSeqhashes)
		if ln > 1 && i < ln-1 {
			related.R.TranslationSeqhashes[i] = related.R.TranslationSeqhashes[ln-1]
		}
		related.R.TranslationSeqhashes = related.R.TranslationSeqhashes[:ln-1]
		break
	}
	return nil
}

// AddGenbanks adds the given related objects to the existing relationships
// of the seqhash, optionally inserting them as new records.
// Appends related to o.R.Genbanks.
// Sets related.R.GenbankSeqhash appropriately.
// AddGenbanks attaches genbank rows to this seqhash. When insert is true each
// related row is inserted with its seqhash FK pre-set; otherwise the existing
// row's "seqhash" column is updated. In-memory R structs are kept in sync on
// both sides.
func (o *Seqhash) AddGenbanks(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Genbank) error {
	var err error
	for _, rel := range related {
		if insert {
			rel.Seqhash = o.Seqhash
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Index 0 selects unindexed "?" placeholders.
			updateQuery := fmt.Sprintf(
				"UPDATE \"genbank\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"seqhash"}),
				strmangle.WhereClause("\"", "\"", 0, genbankPrimaryKeyColumns),
			)
			values := []interface{}{o.Seqhash, rel.Accession}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.Seqhash = o.Seqhash
		}
	}

	if o.R == nil {
		o.R = &seqhashR{
			Genbanks: related,
		}
	} else {
		o.R.Genbanks = append(o.R.Genbanks, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &genbankR{
				GenbankSeqhash: o,
			}
		} else {
			rel.R.GenbankSeqhash = o
		}
	}
	return nil
}

// AddParentGenbanks adds the given related objects to the existing relationships
// of the seqhash, optionally inserting them as new records.
// Appends related to o.R.ParentGenbanks.
// Sets related.R.Seqhashes appropriately.
// AddParentGenbanks links genbank rows to this seqhash through the
// "genbank_features" join table (N-M relationship). Related rows are
// optionally inserted first, then one join-table row is inserted per item.
func (o *Seqhash) AddParentGenbanks(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Genbank) error {
	var err error
	for _, rel := range related {
		if insert {
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		}
	}

	// One join-table insert per related item (not batched).
	for _, rel := range related {
		query := "insert into \"genbank_features\" (\"seqhash\", \"parent\") values (?, ?)"
		values := []interface{}{o.Seqhash, rel.Accession}

		if boil.IsDebug(ctx) {
			writer := boil.DebugWriterFrom(ctx)
			fmt.Fprintln(writer, query)
			fmt.Fprintln(writer, values)
		}
		_, err = exec.ExecContext(ctx, query, values...)
		if err != nil {
			return errors.Wrap(err, "failed to insert into join table")
		}
	}
	if o.R == nil {
		o.R = &seqhashR{
			ParentGenbanks: related,
		}
	} else {
		o.R.ParentGenbanks = append(o.R.ParentGenbanks, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &genbankR{
				Seqhashes: SeqhashSlice{o},
			}
		} else {
			rel.R.Seqhashes = append(rel.R.Seqhashes, o)
		}
	}
	return nil
}

// SetParentGenbanks removes all previously related items of the
// seqhash replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.Seqhashes's ParentGenbanks accordingly.
// Replaces o.R.ParentGenbanks with related.
// Sets related.R.Seqhashes's ParentGenbanks accordingly.
func (o *Seqhash) SetParentGenbanks(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Genbank) error {
	// Wipe all existing join rows for this seqhash, then delegate to
	// AddParentGenbanks to re-create the desired set.
	query := "delete from \"genbank_features\" where \"seqhash\" = ?"
	values := []interface{}{o.Seqhash}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	removeParentGenbanksFromSeqhashesSlice(o, related)
	if o.R != nil {
		o.R.ParentGenbanks = nil
	}

	return o.AddParentGenbanks(ctx, exec, insert, related...)
}

// RemoveParentGenbanks relationships from objects passed in.
// Removes related items from R.ParentGenbanks (uses pointer comparison, removal does not keep order)
// Sets related.R.Seqhashes.
func (o *Seqhash) RemoveParentGenbanks(ctx context.Context, exec boil.ContextExecutor, related ...*Genbank) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	// Delete only the join rows pairing this seqhash with the given parents;
	// placeholder numbering starts at 2 because the seqhash itself is arg 1.
	query := fmt.Sprintf(
		"delete from \"genbank_features\" where \"seqhash\" = ? and \"parent\" in (%s)",
		strmangle.Placeholders(dialect.UseIndexPlaceholders, len(related), 2, 1),
	)
	values := []interface{}{o.Seqhash}
	for _, rel := range related {
		values = append(values, rel.Accession)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err = exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}
	removeParentGenbanksFromSeqhashesSlice(o, related)
	if o.R == nil {
		return nil
	}

	// Swap-with-last removal by pointer identity; order not preserved.
	for _, rel := range related {
		for i, ri := range o.R.ParentGenbanks {
			if rel != ri {
				continue
			}

			ln := len(o.R.ParentGenbanks)
			if ln > 1 && i < ln-1 {
				o.R.ParentGenbanks[i] = o.R.ParentGenbanks[ln-1]
			}
			o.R.ParentGenbanks = o.R.ParentGenbanks[:ln-1]
			break
		}
	}

	return nil
}

// removeParentGenbanksFromSeqhashesSlice drops o from each related item's
// in-memory R.Seqhashes slice (matched by Seqhash key, swap-with-last).
func removeParentGenbanksFromSeqhashesSlice(o *Seqhash, related []*Genbank) {
	for _, rel := range related {
		if rel.R == nil {
			continue
		}
		for i, ri := range rel.R.Seqhashes {
			if !queries.Equal(o.Seqhash, ri.Seqhash) {
				continue
			}

			ln := len(rel.R.Seqhashes)
			if ln > 1 && i < ln-1 {
				rel.R.Seqhashes[i] = rel.R.Seqhashes[ln-1]
			}
			rel.R.Seqhashes = rel.R.Seqhashes[:ln-1]
			break
		}
	}
}

// AddTranslationSeqhashes adds the given related objects to the existing relationships
// of the seqhash, optionally inserting them as new records.
// Appends related to o.R.TranslationSeqhashes.
// Sets related.R.TranslationSeqhash appropriately.
// AddTranslationSeqhashes points each related seqhash's "translation" column
// at o (self-referential 1-M). Rows are inserted with the FK pre-assigned when
// insert is true, otherwise updated in place. R structs are synced both ways.
func (o *Seqhash) AddTranslationSeqhashes(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Seqhash) error {
	var err error
	for _, rel := range related {
		if insert {
			queries.Assign(&rel.Translation, o.Seqhash)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Index 0 selects unindexed "?" placeholders.
			updateQuery := fmt.Sprintf(
				"UPDATE \"seqhash\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"translation"}),
				strmangle.WhereClause("\"", "\"", 0, seqhashPrimaryKeyColumns),
			)
			values := []interface{}{o.Seqhash, rel.Seqhash}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.Translation, o.Seqhash)
		}
	}

	if o.R == nil {
		o.R = &seqhashR{
			TranslationSeqhashes: related,
		}
	} else {
		o.R.TranslationSeqhashes = append(o.R.TranslationSeqhashes, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &seqhashR{
				TranslationSeqhash: o,
			}
		} else {
			rel.R.TranslationSeqhash = o
		}
	}
	return nil
}

// SetTranslationSeqhashes removes all previously related items of the
// seqhash replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.TranslationSeqhash's TranslationSeqhashes accordingly.
// Replaces o.R.TranslationSeqhashes with related.
// Sets related.R.TranslationSeqhash's TranslationSeqhashes accordingly.
func (o *Seqhash) SetTranslationSeqhashes(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Seqhash) error {
	// Null out every existing child's FK in one statement, then delegate to
	// AddTranslationSeqhashes to attach the replacement set.
	query := "update \"seqhash\" set \"translation\" = null where \"translation\" = ?"
	values := []interface{}{o.Seqhash}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	if o.R != nil {
		for _, rel := range o.R.TranslationSeqhashes {
			queries.SetScanner(&rel.Translation, nil)
			if rel.R == nil {
				continue
			}

			rel.R.TranslationSeqhash = nil
		}
		o.R.TranslationSeqhashes = nil
	}

	return o.AddTranslationSeqhashes(ctx, exec, insert, related...)
}

// RemoveTranslationSeqhashes relationships from objects passed in.
// Removes related items from R.TranslationSeqhashes (uses pointer comparison, removal does not keep order)
// Sets related.R.TranslationSeqhash.
func (o *Seqhash) RemoveTranslationSeqhashes(ctx context.Context, exec boil.ContextExecutor, related ...*Seqhash) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	// One UPDATE per related row (via rel.Update), clearing its FK.
	for _, rel := range related {
		queries.SetScanner(&rel.Translation, nil)
		if rel.R != nil {
			rel.R.TranslationSeqhash = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("translation")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	// Swap-with-last removal by pointer identity; order not preserved.
	for _, rel := range related {
		for i, ri := range o.R.TranslationSeqhashes {
			if rel != ri {
				continue
			}

			ln := len(o.R.TranslationSeqhashes)
			if ln > 1 && i < ln-1 {
				o.R.TranslationSeqhashes[i] = o.R.TranslationSeqhashes[ln-1]
			}
			o.R.TranslationSeqhashes = o.R.TranslationSeqhashes[:ln-1]
			break
		}
	}

	return nil
}

// AddUniprots adds the given related objects to the existing relationships
// of the seqhash, optionally inserting them as new records.
// Appends related to o.R.Uniprots.
// Sets related.R.UniprotSeqhash appropriately.
// AddUniprots attaches uniprot rows to this seqhash, mirroring AddGenbanks:
// insert with FK pre-set, or update the existing row's "seqhash" column, then
// sync the in-memory R structs on both sides.
func (o *Seqhash) AddUniprots(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Uniprot) error {
	var err error
	for _, rel := range related {
		if insert {
			rel.Seqhash = o.Seqhash
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Index 0 selects unindexed "?" placeholders.
			updateQuery := fmt.Sprintf(
				"UPDATE \"uniprot\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"seqhash"}),
				strmangle.WhereClause("\"", "\"", 0, uniprotPrimaryKeyColumns),
			)
			values := []interface{}{o.Seqhash, rel.Accession}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.Seqhash = o.Seqhash
		}
	}

	if o.R == nil {
		o.R = &seqhashR{
			Uniprots: related,
		}
	} else {
		o.R.Uniprots = append(o.R.Uniprots, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &uniprotR{
				UniprotSeqhash: o,
			}
		} else {
			rel.R.UniprotSeqhash = o
		}
	}
	return nil
}

// Seqhashes retrieves all the records using an executor.
// A default SELECT "seqhash".* is applied when the mods specify no columns.
func Seqhashes(mods ...qm.QueryMod) seqhashQuery {
	mods = append(mods, qm.From("\"seqhash\""))
	q := NewQuery(mods...)
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"seqhash\".*"})
	}

	return seqhashQuery{q}
}

// FindSeqhash retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
// FindSeqhash looks up one row by its primary key ("seqhash" column) and runs
// the after-select hooks on the result. Returns sql.ErrNoRows when absent.
func FindSeqhash(ctx context.Context, exec boil.ContextExecutor, seqhash string, selectCols ...string) (*Seqhash, error) {
	seqhashObj := &Seqhash{}

	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"seqhash\" where \"seqhash\"=?", sel,
	)

	q := queries.Raw(query, seqhash)

	err := q.Bind(ctx, exec, seqhashObj)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from seqhash")
	}

	if err = seqhashObj.doAfterSelectHooks(ctx, exec); err != nil {
		return seqhashObj, err
	}

	return seqhashObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
// The generated INSERT statement (and its struct-field mapping) is cached per
// column-set key to avoid rebuilding it on every call.
func (o *Seqhash) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no seqhash provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(seqhashColumnsWithDefault, o)

	key := makeCacheKey(columns, nzDefaults)
	seqhashInsertCacheMut.RLock()
	cache, cached := seqhashInsertCache[key]
	seqhashInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			seqhashAllColumns,
			seqhashColumnsWithDefault,
			seqhashColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(seqhashType, seqhashMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(seqhashType, seqhashMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"seqhash\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"seqhash\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		// Splice the (possibly empty) OUTPUT/RETURNING fragments into the
		// cached template.
		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	// Use QueryRow only when RETURNING columns must be scanned back.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into seqhash")
	}

	if !cached {
		seqhashInsertCacheMut.Lock()
		seqhashInsertCache[key] = cache
		seqhashInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the Seqhash.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
// Update writes the chosen columns of o back to the DB keyed by primary key,
// running before/after-update hooks. The built statement is cached per
// column-set key. Returns the number of rows affected.
func (o *Seqhash) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	seqhashUpdateCacheMut.RLock()
	cache, cached := seqhashUpdateCache[key]
	seqhashUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			seqhashAllColumns,
			seqhashPrimaryKeyColumns,
		)

		// created_at is never updated unless explicitly whitelisted.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update seqhash, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"seqhash\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, seqhashPrimaryKeyColumns),
		)
		// Primary-key values are appended after the SET values in the
		// bound-argument mapping.
		cache.valueMapping, err = queries.BindMapping(seqhashType, seqhashMapping, append(wl, seqhashPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update seqhash row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for seqhash")
	}

	if !cached {
		seqhashUpdateCacheMut.Lock()
		seqhashUpdateCache[key] = cache
		seqhashUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
// UpdateAll applies the given column values to every row matched by the query.
func (q seqhashQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for seqhash")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for seqhash")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
// The WHERE clause matches each slice element by primary key; no hooks run.
func (o SeqhashSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	// NOTE: map iteration order is random, so colNames/args pair up but the
	// column order varies per call (arguments stay aligned with the clause).
	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), seqhashPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"seqhash\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, seqhashPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in seqhash slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all seqhash")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
// The statement is built via buildUpsertQuerySQLite (SQLite ON CONFLICT form)
// and cached under a key derived from every input that affects the SQL.
func (o *Seqhash) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no seqhash provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(seqhashColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	seqhashUpsertCacheMut.RLock()
	cache, cached := seqhashUpsertCache[key]
	seqhashUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			seqhashAllColumns,
			seqhashColumnsWithDefault,
			seqhashColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			seqhashAllColumns,
			seqhashPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert seqhash, could not build update column list")
		}

		// Default the conflict target to the primary key.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(seqhashPrimaryKeyColumns))
			copy(conflict, seqhashPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"seqhash\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(seqhashType, seqhashMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(seqhashType, seqhashMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert seqhash")
	}

	if !cached {
		seqhashUpsertCacheMut.Lock()
		seqhashUpsertCache[key] = cache
		seqhashUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single Seqhash record with an executor.
// Delete will match against the primary key column to find the record to delete.
// Delete removes this row by primary key, running before/after-delete hooks.
// Returns the number of rows affected.
func (o *Seqhash) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no Seqhash provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), seqhashPrimaryKeyMapping)
	sql := "DELETE FROM \"seqhash\" WHERE \"seqhash\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from seqhash")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for seqhash")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
// No per-row hooks run on this path.
func (q seqhashQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no seqhashQuery provided for delete all")
	}

	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from seqhash")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for seqhash")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
// DeleteAll deletes every element of the slice in one statement keyed by
// primary key. Before/after-delete hooks run per element only when hooks are
// registered.
func (o SeqhashSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	if len(seqhashBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), seqhashPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"seqhash\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, seqhashPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from seqhash slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for seqhash")
	}

	if len(seqhashAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
// The fetched row fully overwrites *o (R/L relationship structs included).
func (o *Seqhash) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindSeqhash(ctx, exec, o.Seqhash)
	if err != nil {
		return err
	}

	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
// ReloadAll re-selects all rows matching the slice's primary keys in one
// query and replaces *o with the fresh slice. NOTE(review): rows deleted in
// the DB simply drop out, so the result may be shorter and in DB order.
func (o *SeqhashSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := SeqhashSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), seqhashPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"seqhash\".* FROM \"seqhash\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, seqhashPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in SeqhashSlice")
	}

	*o = slice

	return nil
}

// SeqhashExists checks if the Seqhash row exists.
func SeqhashExists(ctx context.Context, exec boil.ContextExecutor, seqhash string) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"seqhash\" where \"seqhash\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, seqhash)
	}
	row := exec.QueryRowContext(ctx, sql, seqhash)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if seqhash exists")
	}

	return exists, nil
}
diff --git a/models/site_components.go b/models/site_components.go
new file mode 100644
index 0000000..25c4bdc
--- /dev/null
+++ b/models/site_components.go
@@ -0,0 +1,1502 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// SiteComponent is an object representing the database table.
type SiteComponent struct {
	SitecompID   int64       `boil:"sitecomp_id" json:"sitecomp_id" toml:"sitecomp_id" yaml:"sitecomp_id"`
	SiteID       int64       `boil:"site_id" json:"site_id" toml:"site_id" yaml:"site_id"`
	ComponentID  null.Int64  `boil:"component_id" json:"component_id,omitempty" toml:"component_id" yaml:"component_id,omitempty"`
	DomainID     null.Int64  `boil:"domain_id" json:"domain_id,omitempty" toml:"domain_id" yaml:"domain_id,omitempty"`
	SiteResidues null.String `boil:"site_residues" json:"site_residues,omitempty" toml:"site_residues" yaml:"site_residues,omitempty"`

	// R holds eager-loaded relationships; L holds the Load methods.
	R *siteComponentR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L siteComponentL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// SiteComponentColumns maps struct field names to bare column names.
var SiteComponentColumns = struct {
	SitecompID   string
	SiteID       string
	ComponentID  string
	DomainID     string
	SiteResidues string
}{
	SitecompID:   "sitecomp_id",
	SiteID:       "site_id",
	ComponentID:  "component_id",
	DomainID:     "domain_id",
	SiteResidues: "site_residues",
}

// SiteComponentTableColumns maps struct field names to table-qualified columns.
var SiteComponentTableColumns = struct {
	SitecompID   string
	SiteID       string
	ComponentID  string
	DomainID     string
	SiteResidues string
}{
	SitecompID:   "site_components.sitecomp_id",
	SiteID:       "site_components.site_id",
	ComponentID:  "site_components.component_id",
	DomainID:     "site_components.domain_id",
	SiteResidues: "site_components.site_residues",
}

// Generated where

var SiteComponentWhere = struct {
	SitecompID   whereHelperint64
	SiteID       whereHelperint64
	ComponentID  whereHelpernull_Int64
	DomainID     whereHelpernull_Int64
	SiteResidues whereHelpernull_String
}{
	SitecompID:   whereHelperint64{field: "\"site_components\".\"sitecomp_id\""},
	SiteID:       whereHelperint64{field: "\"site_components\".\"site_id\""},
	ComponentID:  whereHelpernull_Int64{field: "\"site_components\".\"component_id\""},
	DomainID:     whereHelpernull_Int64{field: "\"site_components\".\"domain_id\""},
	SiteResidues: whereHelpernull_String{field: "\"site_components\".\"site_residues\""},
}

// SiteComponentRels is where relationship names are stored.
var SiteComponentRels = struct {
	Site      string
	Domain    string
	Component string
}{
	Site:      "Site",
	Domain:    "Domain",
	Component: "Component",
}

// siteComponentR is where relationships are stored.
type siteComponentR struct {
	Site      *BindingSite       `boil:"Site" json:"Site" toml:"Site" yaml:"Site"`
	Domain    *Domain            `boil:"Domain" json:"Domain" toml:"Domain" yaml:"Domain"`
	Component *ComponentSequence `boil:"Component" json:"Component" toml:"Component" yaml:"Component"`
}

// NewStruct creates a new relationship struct
func (*siteComponentR) NewStruct() *siteComponentR {
	return &siteComponentR{}
}

// GetSite returns the eager-loaded Site, or nil on a nil receiver.
func (r *siteComponentR) GetSite() *BindingSite {
	if r == nil {
		return nil
	}
	return r.Site
}

// GetDomain returns the eager-loaded Domain, or nil on a nil receiver.
func (r *siteComponentR) GetDomain() *Domain {
	if r == nil {
		return nil
	}
	return r.Domain
}

// GetComponent returns the eager-loaded Component, or nil on a nil receiver.
func (r *siteComponentR) GetComponent() *ComponentSequence {
	if r == nil {
		return nil
	}
	return r.Component
}

// siteComponentL is where Load methods for each relationship are stored.
type siteComponentL struct{}

var (
	siteComponentAllColumns            = []string{"sitecomp_id", "site_id", "component_id", "domain_id", "site_residues"}
	siteComponentColumnsWithoutDefault = []string{"sitecomp_id", "site_id"}
	siteComponentColumnsWithDefault    = []string{"component_id", "domain_id", "site_residues"}
	siteComponentPrimaryKeyColumns     = []string{"sitecomp_id"}
	siteComponentGeneratedColumns      = []string{}
)

type (
	// SiteComponentSlice is an alias for a slice of pointers to SiteComponent.
	// This should almost always be used instead of []SiteComponent.
	SiteComponentSlice []*SiteComponent
	// SiteComponentHook is the signature for custom SiteComponent hook methods
	SiteComponentHook func(context.Context, boil.ContextExecutor, *SiteComponent) error

	siteComponentQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	siteComponentType                 = reflect.TypeOf(&SiteComponent{})
	siteComponentMapping              = queries.MakeStructMapping(siteComponentType)
	siteComponentPrimaryKeyMapping, _ = queries.BindMapping(siteComponentType, siteComponentMapping, siteComponentPrimaryKeyColumns)
	siteComponentInsertCacheMut       sync.RWMutex
	siteComponentInsertCache          = make(map[string]insertCache)
	siteComponentUpdateCacheMut       sync.RWMutex
	siteComponentUpdateCache          = make(map[string]updateCache)
	siteComponentUpsertCacheMut       sync.RWMutex
	siteComponentUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Package-level hook registries, invoked by the do*Hooks methods below.
var siteComponentAfterSelectHooks []SiteComponentHook

var siteComponentBeforeInsertHooks []SiteComponentHook
var siteComponentAfterInsertHooks []SiteComponentHook

var siteComponentBeforeUpdateHooks []SiteComponentHook
var siteComponentAfterUpdateHooks []SiteComponentHook

var siteComponentBeforeDeleteHooks []SiteComponentHook
var siteComponentAfterDeleteHooks []SiteComponentHook

var siteComponentBeforeUpsertHooks []SiteComponentHook
var siteComponentAfterUpsertHooks []SiteComponentHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *SiteComponent) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range siteComponentAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *SiteComponent) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range siteComponentBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *SiteComponent) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range siteComponentAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
+func (o *SiteComponent) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range siteComponentBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *SiteComponent) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range siteComponentAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *SiteComponent) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range siteComponentBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *SiteComponent) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range siteComponentAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *SiteComponent) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range siteComponentBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *SiteComponent) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range siteComponentAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddSiteComponentHook registers your hook function for all future operations. +func AddSiteComponentHook(hookPoint boil.HookPoint, siteComponentHook SiteComponentHook) { + switch hookPoint { + case boil.AfterSelectHook: + siteComponentAfterSelectHooks = append(siteComponentAfterSelectHooks, siteComponentHook) + case boil.BeforeInsertHook: + siteComponentBeforeInsertHooks = append(siteComponentBeforeInsertHooks, siteComponentHook) + case boil.AfterInsertHook: + siteComponentAfterInsertHooks = append(siteComponentAfterInsertHooks, siteComponentHook) + case boil.BeforeUpdateHook: + siteComponentBeforeUpdateHooks = append(siteComponentBeforeUpdateHooks, siteComponentHook) + case boil.AfterUpdateHook: + siteComponentAfterUpdateHooks = append(siteComponentAfterUpdateHooks, siteComponentHook) + case boil.BeforeDeleteHook: + siteComponentBeforeDeleteHooks = append(siteComponentBeforeDeleteHooks, siteComponentHook) + case boil.AfterDeleteHook: + siteComponentAfterDeleteHooks = append(siteComponentAfterDeleteHooks, siteComponentHook) + case boil.BeforeUpsertHook: + siteComponentBeforeUpsertHooks = append(siteComponentBeforeUpsertHooks, siteComponentHook) + case boil.AfterUpsertHook: + siteComponentAfterUpsertHooks = append(siteComponentAfterUpsertHooks, siteComponentHook) + } +} + +// One returns a single siteComponent record from the query. 
+func (q siteComponentQuery) One(ctx context.Context, exec boil.ContextExecutor) (*SiteComponent, error) { + o := &SiteComponent{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for site_components") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all SiteComponent records from the query. +func (q siteComponentQuery) All(ctx context.Context, exec boil.ContextExecutor) (SiteComponentSlice, error) { + var o []*SiteComponent + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to SiteComponent slice") + } + + if len(siteComponentAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all SiteComponent records in the query. +func (q siteComponentQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count site_components rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q siteComponentQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if site_components exists") + } + + return count > 0, nil +} + +// Site pointed to by the foreign key. 
+func (o *SiteComponent) Site(mods ...qm.QueryMod) bindingSiteQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"site_id\" = ?", o.SiteID), + } + + queryMods = append(queryMods, mods...) + + return BindingSites(queryMods...) +} + +// Domain pointed to by the foreign key. +func (o *SiteComponent) Domain(mods ...qm.QueryMod) domainQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"domain_id\" = ?", o.DomainID), + } + + queryMods = append(queryMods, mods...) + + return Domains(queryMods...) +} + +// Component pointed to by the foreign key. +func (o *SiteComponent) Component(mods ...qm.QueryMod) componentSequenceQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"component_id\" = ?", o.ComponentID), + } + + queryMods = append(queryMods, mods...) + + return ComponentSequences(queryMods...) +} + +// LoadSite allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (siteComponentL) LoadSite(ctx context.Context, e boil.ContextExecutor, singular bool, maybeSiteComponent interface{}, mods queries.Applicator) error { + var slice []*SiteComponent + var object *SiteComponent + + if singular { + object = maybeSiteComponent.(*SiteComponent) + } else { + slice = *maybeSiteComponent.(*[]*SiteComponent) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &siteComponentR{} + } + args = append(args, object.SiteID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &siteComponentR{} + } + + for _, a := range args { + if a == obj.SiteID { + continue Outer + } + } + + args = append(args, obj.SiteID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`binding_sites`), + qm.WhereIn(`binding_sites.site_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load BindingSite") + } + + var resultSlice 
[]*BindingSite + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice BindingSite") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for binding_sites") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for binding_sites") + } + + if len(siteComponentAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Site = foreign + if foreign.R == nil { + foreign.R = &bindingSiteR{} + } + foreign.R.SiteSiteComponents = append(foreign.R.SiteSiteComponents, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.SiteID == foreign.SiteID { + local.R.Site = foreign + if foreign.R == nil { + foreign.R = &bindingSiteR{} + } + foreign.R.SiteSiteComponents = append(foreign.R.SiteSiteComponents, local) + break + } + } + } + + return nil +} + +// LoadDomain allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (siteComponentL) LoadDomain(ctx context.Context, e boil.ContextExecutor, singular bool, maybeSiteComponent interface{}, mods queries.Applicator) error { + var slice []*SiteComponent + var object *SiteComponent + + if singular { + object = maybeSiteComponent.(*SiteComponent) + } else { + slice = *maybeSiteComponent.(*[]*SiteComponent) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &siteComponentR{} + } + if !queries.IsNil(object.DomainID) { + args = append(args, object.DomainID) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &siteComponentR{} + } + + for _, a := range args { + if queries.Equal(a, obj.DomainID) { + continue Outer + } + } + + if !queries.IsNil(obj.DomainID) { + args = append(args, obj.DomainID) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`domains`), + qm.WhereIn(`domains.domain_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Domain") + } + + var resultSlice []*Domain + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Domain") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for domains") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for domains") + } + + if len(siteComponentAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Domain = foreign + if foreign.R == nil { + foreign.R = &domainR{} + } + foreign.R.SiteComponents = append(foreign.R.SiteComponents, object) + return nil + } + + 
for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.DomainID, foreign.DomainID) { + local.R.Domain = foreign + if foreign.R == nil { + foreign.R = &domainR{} + } + foreign.R.SiteComponents = append(foreign.R.SiteComponents, local) + break + } + } + } + + return nil +} + +// LoadComponent allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (siteComponentL) LoadComponent(ctx context.Context, e boil.ContextExecutor, singular bool, maybeSiteComponent interface{}, mods queries.Applicator) error { + var slice []*SiteComponent + var object *SiteComponent + + if singular { + object = maybeSiteComponent.(*SiteComponent) + } else { + slice = *maybeSiteComponent.(*[]*SiteComponent) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &siteComponentR{} + } + if !queries.IsNil(object.ComponentID) { + args = append(args, object.ComponentID) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &siteComponentR{} + } + + for _, a := range args { + if queries.Equal(a, obj.ComponentID) { + continue Outer + } + } + + if !queries.IsNil(obj.ComponentID) { + args = append(args, obj.ComponentID) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`component_sequences`), + qm.WhereIn(`component_sequences.component_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ComponentSequence") + } + + var resultSlice []*ComponentSequence + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice ComponentSequence") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for component_sequences") + } + if err = results.Err(); err != nil 
{ + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for component_sequences") + } + + if len(siteComponentAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Component = foreign + if foreign.R == nil { + foreign.R = &componentSequenceR{} + } + foreign.R.ComponentSiteComponents = append(foreign.R.ComponentSiteComponents, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.ComponentID, foreign.ComponentID) { + local.R.Component = foreign + if foreign.R == nil { + foreign.R = &componentSequenceR{} + } + foreign.R.ComponentSiteComponents = append(foreign.R.ComponentSiteComponents, local) + break + } + } + } + + return nil +} + +// SetSite of the siteComponent to the related item. +// Sets o.R.Site to related. +// Adds o to related.R.SiteSiteComponents. 
+func (o *SiteComponent) SetSite(ctx context.Context, exec boil.ContextExecutor, insert bool, related *BindingSite) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"site_components\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"site_id"}), + strmangle.WhereClause("\"", "\"", 0, siteComponentPrimaryKeyColumns), + ) + values := []interface{}{related.SiteID, o.SitecompID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.SiteID = related.SiteID + if o.R == nil { + o.R = &siteComponentR{ + Site: related, + } + } else { + o.R.Site = related + } + + if related.R == nil { + related.R = &bindingSiteR{ + SiteSiteComponents: SiteComponentSlice{o}, + } + } else { + related.R.SiteSiteComponents = append(related.R.SiteSiteComponents, o) + } + + return nil +} + +// SetDomain of the siteComponent to the related item. +// Sets o.R.Domain to related. +// Adds o to related.R.SiteComponents. 
+func (o *SiteComponent) SetDomain(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Domain) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"site_components\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"domain_id"}), + strmangle.WhereClause("\"", "\"", 0, siteComponentPrimaryKeyColumns), + ) + values := []interface{}{related.DomainID, o.SitecompID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.DomainID, related.DomainID) + if o.R == nil { + o.R = &siteComponentR{ + Domain: related, + } + } else { + o.R.Domain = related + } + + if related.R == nil { + related.R = &domainR{ + SiteComponents: SiteComponentSlice{o}, + } + } else { + related.R.SiteComponents = append(related.R.SiteComponents, o) + } + + return nil +} + +// RemoveDomain relationship. +// Sets o.R.Domain to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *SiteComponent) RemoveDomain(ctx context.Context, exec boil.ContextExecutor, related *Domain) error { + var err error + + queries.SetScanner(&o.DomainID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("domain_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.Domain = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.SiteComponents { + if queries.Equal(o.DomainID, ri.DomainID) { + continue + } + + ln := len(related.R.SiteComponents) + if ln > 1 && i < ln-1 { + related.R.SiteComponents[i] = related.R.SiteComponents[ln-1] + } + related.R.SiteComponents = related.R.SiteComponents[:ln-1] + break + } + return nil +} + +// SetComponent of the siteComponent to the related item. +// Sets o.R.Component to related. +// Adds o to related.R.ComponentSiteComponents. +func (o *SiteComponent) SetComponent(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ComponentSequence) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"site_components\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"component_id"}), + strmangle.WhereClause("\"", "\"", 0, siteComponentPrimaryKeyColumns), + ) + values := []interface{}{related.ComponentID, o.SitecompID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.ComponentID, related.ComponentID) + if o.R == nil { + o.R = &siteComponentR{ + Component: related, + } + } else { + o.R.Component = related + } + + if related.R == nil { + related.R = &componentSequenceR{ + 
ComponentSiteComponents: SiteComponentSlice{o}, + } + } else { + related.R.ComponentSiteComponents = append(related.R.ComponentSiteComponents, o) + } + + return nil +} + +// RemoveComponent relationship. +// Sets o.R.Component to nil. +// Removes o from all passed in related items' relationships struct. +func (o *SiteComponent) RemoveComponent(ctx context.Context, exec boil.ContextExecutor, related *ComponentSequence) error { + var err error + + queries.SetScanner(&o.ComponentID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("component_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.Component = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.ComponentSiteComponents { + if queries.Equal(o.ComponentID, ri.ComponentID) { + continue + } + + ln := len(related.R.ComponentSiteComponents) + if ln > 1 && i < ln-1 { + related.R.ComponentSiteComponents[i] = related.R.ComponentSiteComponents[ln-1] + } + related.R.ComponentSiteComponents = related.R.ComponentSiteComponents[:ln-1] + break + } + return nil +} + +// SiteComponents retrieves all the records using an executor. +func SiteComponents(mods ...qm.QueryMod) siteComponentQuery { + mods = append(mods, qm.From("\"site_components\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"site_components\".*"}) + } + + return siteComponentQuery{q} +} + +// FindSiteComponent retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindSiteComponent(ctx context.Context, exec boil.ContextExecutor, sitecompID int64, selectCols ...string) (*SiteComponent, error) { + siteComponentObj := &SiteComponent{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"site_components\" where \"sitecomp_id\"=?", sel, + ) + + q := queries.Raw(query, sitecompID) + + err := q.Bind(ctx, exec, siteComponentObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from site_components") + } + + if err = siteComponentObj.doAfterSelectHooks(ctx, exec); err != nil { + return siteComponentObj, err + } + + return siteComponentObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *SiteComponent) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no site_components provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(siteComponentColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + siteComponentInsertCacheMut.RLock() + cache, cached := siteComponentInsertCache[key] + siteComponentInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + siteComponentAllColumns, + siteComponentColumnsWithDefault, + siteComponentColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(siteComponentType, siteComponentMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(siteComponentType, siteComponentMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = 
fmt.Sprintf("INSERT INTO \"site_components\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"site_components\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into site_components") + } + + if !cached { + siteComponentInsertCacheMut.Lock() + siteComponentInsertCache[key] = cache + siteComponentInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the SiteComponent. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *SiteComponent) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + siteComponentUpdateCacheMut.RLock() + cache, cached := siteComponentUpdateCache[key] + siteComponentUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + siteComponentAllColumns, + siteComponentPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update site_components, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"site_components\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, siteComponentPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(siteComponentType, siteComponentMapping, append(wl, siteComponentPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update site_components row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for site_components") + } + + if !cached { + siteComponentUpdateCacheMut.Lock() + siteComponentUpdateCache[key] = cache + siteComponentUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q siteComponentQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for site_components") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for site_components") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o SiteComponentSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), siteComponentPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"site_components\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, siteComponentPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in siteComponent slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all siteComponent") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *SiteComponent) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no site_components provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(siteComponentColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + siteComponentUpsertCacheMut.RLock() + cache, cached := siteComponentUpsertCache[key] + siteComponentUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + siteComponentAllColumns, + siteComponentColumnsWithDefault, + siteComponentColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + siteComponentAllColumns, + 
siteComponentPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert site_components, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(siteComponentPrimaryKeyColumns)) + copy(conflict, siteComponentPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"site_components\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(siteComponentType, siteComponentMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(siteComponentType, siteComponentMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert site_components") + } + + if !cached { + siteComponentUpsertCacheMut.Lock() + siteComponentUpsertCache[key] = cache + siteComponentUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single SiteComponent record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *SiteComponent) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no SiteComponent provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), siteComponentPrimaryKeyMapping) + sql := "DELETE FROM \"site_components\" WHERE \"sitecomp_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from site_components") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for site_components") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q siteComponentQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no siteComponentQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from site_components") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for site_components") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o SiteComponentSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(siteComponentBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), siteComponentPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"site_components\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, siteComponentPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from siteComponent slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for site_components") + } + + if len(siteComponentAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *SiteComponent) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindSiteComponent(ctx, exec, o.SitecompID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *SiteComponentSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := SiteComponentSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), siteComponentPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"site_components\".* FROM \"site_components\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, siteComponentPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in SiteComponentSlice") + } + + *o = slice + + return nil +} + +// SiteComponentExists checks if the SiteComponent row exists. +func SiteComponentExists(ctx context.Context, exec boil.ContextExecutor, sitecompID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"site_components\" where \"sitecomp_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, sitecompID) + } + row := exec.QueryRowContext(ctx, sql, sitecompID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if site_components exists") + } + + return exists, nil +} diff --git a/models/source.go b/models/source.go new file mode 100644 index 0000000..11e36f9 --- /dev/null +++ b/models/source.go @@ -0,0 +1,1674 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Source is an object representing the database table. +type Source struct { + SRCID int64 `boil:"src_id" json:"src_id" toml:"src_id" yaml:"src_id"` + SRCDescription null.String `boil:"src_description" json:"src_description,omitempty" toml:"src_description" yaml:"src_description,omitempty"` + SRCShortName null.String `boil:"src_short_name" json:"src_short_name,omitempty" toml:"src_short_name" yaml:"src_short_name,omitempty"` + + R *sourceR `boil:"-" json:"-" toml:"-" yaml:"-"` + L sourceL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var SourceColumns = struct { + SRCID string + SRCDescription string + SRCShortName string +}{ + SRCID: "src_id", + SRCDescription: "src_description", + SRCShortName: "src_short_name", +} + +var SourceTableColumns = struct { + SRCID string + SRCDescription string + SRCShortName string +}{ + SRCID: "source.src_id", + SRCDescription: "source.src_description", + SRCShortName: "source.src_short_name", +} + +// Generated where + +var SourceWhere = struct { + SRCID whereHelperint64 + SRCDescription whereHelpernull_String + SRCShortName whereHelpernull_String +}{ + SRCID: whereHelperint64{field: "\"source\".\"src_id\""}, + SRCDescription: whereHelpernull_String{field: "\"source\".\"src_description\""}, + SRCShortName: whereHelpernull_String{field: "\"source\".\"src_short_name\""}, +} + +// SourceRels is where relationship names are stored. 
+var SourceRels = struct { + SRCActivities string + SRCAssays string + SRCCompoundRecords string + SRCDocs string +}{ + SRCActivities: "SRCActivities", + SRCAssays: "SRCAssays", + SRCCompoundRecords: "SRCCompoundRecords", + SRCDocs: "SRCDocs", +} + +// sourceR is where relationships are stored. +type sourceR struct { + SRCActivities ActivitySlice `boil:"SRCActivities" json:"SRCActivities" toml:"SRCActivities" yaml:"SRCActivities"` + SRCAssays AssaySlice `boil:"SRCAssays" json:"SRCAssays" toml:"SRCAssays" yaml:"SRCAssays"` + SRCCompoundRecords CompoundRecordSlice `boil:"SRCCompoundRecords" json:"SRCCompoundRecords" toml:"SRCCompoundRecords" yaml:"SRCCompoundRecords"` + SRCDocs DocSlice `boil:"SRCDocs" json:"SRCDocs" toml:"SRCDocs" yaml:"SRCDocs"` +} + +// NewStruct creates a new relationship struct +func (*sourceR) NewStruct() *sourceR { + return &sourceR{} +} + +func (r *sourceR) GetSRCActivities() ActivitySlice { + if r == nil { + return nil + } + return r.SRCActivities +} + +func (r *sourceR) GetSRCAssays() AssaySlice { + if r == nil { + return nil + } + return r.SRCAssays +} + +func (r *sourceR) GetSRCCompoundRecords() CompoundRecordSlice { + if r == nil { + return nil + } + return r.SRCCompoundRecords +} + +func (r *sourceR) GetSRCDocs() DocSlice { + if r == nil { + return nil + } + return r.SRCDocs +} + +// sourceL is where Load methods for each relationship are stored. +type sourceL struct{} + +var ( + sourceAllColumns = []string{"src_id", "src_description", "src_short_name"} + sourceColumnsWithoutDefault = []string{} + sourceColumnsWithDefault = []string{"src_id", "src_description", "src_short_name"} + sourcePrimaryKeyColumns = []string{"src_id"} + sourceGeneratedColumns = []string{"src_id"} +) + +type ( + // SourceSlice is an alias for a slice of pointers to Source. + // This should almost always be used instead of []Source. 
+ SourceSlice []*Source + // SourceHook is the signature for custom Source hook methods + SourceHook func(context.Context, boil.ContextExecutor, *Source) error + + sourceQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + sourceType = reflect.TypeOf(&Source{}) + sourceMapping = queries.MakeStructMapping(sourceType) + sourcePrimaryKeyMapping, _ = queries.BindMapping(sourceType, sourceMapping, sourcePrimaryKeyColumns) + sourceInsertCacheMut sync.RWMutex + sourceInsertCache = make(map[string]insertCache) + sourceUpdateCacheMut sync.RWMutex + sourceUpdateCache = make(map[string]updateCache) + sourceUpsertCacheMut sync.RWMutex + sourceUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var sourceAfterSelectHooks []SourceHook + +var sourceBeforeInsertHooks []SourceHook +var sourceAfterInsertHooks []SourceHook + +var sourceBeforeUpdateHooks []SourceHook +var sourceAfterUpdateHooks []SourceHook + +var sourceBeforeDeleteHooks []SourceHook +var sourceAfterDeleteHooks []SourceHook + +var sourceBeforeUpsertHooks []SourceHook +var sourceAfterUpsertHooks []SourceHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *Source) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range sourceAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. 
+func (o *Source) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range sourceBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Source) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range sourceAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *Source) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range sourceBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Source) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range sourceAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Source) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range sourceBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. 
+func (o *Source) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range sourceAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Source) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range sourceBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *Source) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range sourceAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddSourceHook registers your hook function for all future operations. 
+func AddSourceHook(hookPoint boil.HookPoint, sourceHook SourceHook) { + switch hookPoint { + case boil.AfterSelectHook: + sourceAfterSelectHooks = append(sourceAfterSelectHooks, sourceHook) + case boil.BeforeInsertHook: + sourceBeforeInsertHooks = append(sourceBeforeInsertHooks, sourceHook) + case boil.AfterInsertHook: + sourceAfterInsertHooks = append(sourceAfterInsertHooks, sourceHook) + case boil.BeforeUpdateHook: + sourceBeforeUpdateHooks = append(sourceBeforeUpdateHooks, sourceHook) + case boil.AfterUpdateHook: + sourceAfterUpdateHooks = append(sourceAfterUpdateHooks, sourceHook) + case boil.BeforeDeleteHook: + sourceBeforeDeleteHooks = append(sourceBeforeDeleteHooks, sourceHook) + case boil.AfterDeleteHook: + sourceAfterDeleteHooks = append(sourceAfterDeleteHooks, sourceHook) + case boil.BeforeUpsertHook: + sourceBeforeUpsertHooks = append(sourceBeforeUpsertHooks, sourceHook) + case boil.AfterUpsertHook: + sourceAfterUpsertHooks = append(sourceAfterUpsertHooks, sourceHook) + } +} + +// One returns a single source record from the query. +func (q sourceQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Source, error) { + o := &Source{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for source") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Source records from the query. 
+func (q sourceQuery) All(ctx context.Context, exec boil.ContextExecutor) (SourceSlice, error) { + var o []*Source + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to Source slice") + } + + if len(sourceAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Source records in the query. +func (q sourceQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count source rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q sourceQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if source exists") + } + + return count > 0, nil +} + +// SRCActivities retrieves all the activity's Activities with an executor via src_id column. +func (o *Source) SRCActivities(mods ...qm.QueryMod) activityQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"activities\".\"src_id\"=?", o.SRCID), + ) + + return Activities(queryMods...) +} + +// SRCAssays retrieves all the assay's Assays with an executor via src_id column. +func (o *Source) SRCAssays(mods ...qm.QueryMod) assayQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) 
+ } + + queryMods = append(queryMods, + qm.Where("\"assays\".\"src_id\"=?", o.SRCID), + ) + + return Assays(queryMods...) +} + +// SRCCompoundRecords retrieves all the compound_record's CompoundRecords with an executor via src_id column. +func (o *Source) SRCCompoundRecords(mods ...qm.QueryMod) compoundRecordQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"compound_records\".\"src_id\"=?", o.SRCID), + ) + + return CompoundRecords(queryMods...) +} + +// SRCDocs retrieves all the doc's Docs with an executor via src_id column. +func (o *Source) SRCDocs(mods ...qm.QueryMod) docQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"docs\".\"src_id\"=?", o.SRCID), + ) + + return Docs(queryMods...) +} + +// LoadSRCActivities allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (sourceL) LoadSRCActivities(ctx context.Context, e boil.ContextExecutor, singular bool, maybeSource interface{}, mods queries.Applicator) error { + var slice []*Source + var object *Source + + if singular { + object = maybeSource.(*Source) + } else { + slice = *maybeSource.(*[]*Source) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &sourceR{} + } + args = append(args, object.SRCID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &sourceR{} + } + + for _, a := range args { + if queries.Equal(a, obj.SRCID) { + continue Outer + } + } + + args = append(args, obj.SRCID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`activities`), + qm.WhereIn(`activities.src_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load activities") + } + + var resultSlice []*Activity + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice activities") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on activities") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for activities") + } + + if len(activityAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.SRCActivities = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &activityR{} + } + foreign.R.SRC = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.SRCID, foreign.SRCID) { + local.R.SRCActivities = append(local.R.SRCActivities, foreign) + if foreign.R == nil { + 
foreign.R = &activityR{} + } + foreign.R.SRC = local + break + } + } + } + + return nil +} + +// LoadSRCAssays allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (sourceL) LoadSRCAssays(ctx context.Context, e boil.ContextExecutor, singular bool, maybeSource interface{}, mods queries.Applicator) error { + var slice []*Source + var object *Source + + if singular { + object = maybeSource.(*Source) + } else { + slice = *maybeSource.(*[]*Source) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &sourceR{} + } + args = append(args, object.SRCID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &sourceR{} + } + + for _, a := range args { + if a == obj.SRCID { + continue Outer + } + } + + args = append(args, obj.SRCID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assays`), + qm.WhereIn(`assays.src_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load assays") + } + + var resultSlice []*Assay + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice assays") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on assays") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays") + } + + if len(assayAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.SRCAssays = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.SRC = object + } + return nil + } + + for _, foreign := range 
resultSlice { + for _, local := range slice { + if local.SRCID == foreign.SRCID { + local.R.SRCAssays = append(local.R.SRCAssays, foreign) + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.SRC = local + break + } + } + } + + return nil +} + +// LoadSRCCompoundRecords allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (sourceL) LoadSRCCompoundRecords(ctx context.Context, e boil.ContextExecutor, singular bool, maybeSource interface{}, mods queries.Applicator) error { + var slice []*Source + var object *Source + + if singular { + object = maybeSource.(*Source) + } else { + slice = *maybeSource.(*[]*Source) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &sourceR{} + } + args = append(args, object.SRCID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &sourceR{} + } + + for _, a := range args { + if a == obj.SRCID { + continue Outer + } + } + + args = append(args, obj.SRCID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`compound_records`), + qm.WhereIn(`compound_records.src_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load compound_records") + } + + var resultSlice []*CompoundRecord + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice compound_records") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on compound_records") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound_records") + } + + if len(compoundRecordAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err 
!= nil { + return err + } + } + } + if singular { + object.R.SRCCompoundRecords = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.SRC = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.SRCID == foreign.SRCID { + local.R.SRCCompoundRecords = append(local.R.SRCCompoundRecords, foreign) + if foreign.R == nil { + foreign.R = &compoundRecordR{} + } + foreign.R.SRC = local + break + } + } + } + + return nil +} + +// LoadSRCDocs allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (sourceL) LoadSRCDocs(ctx context.Context, e boil.ContextExecutor, singular bool, maybeSource interface{}, mods queries.Applicator) error { + var slice []*Source + var object *Source + + if singular { + object = maybeSource.(*Source) + } else { + slice = *maybeSource.(*[]*Source) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &sourceR{} + } + args = append(args, object.SRCID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &sourceR{} + } + + for _, a := range args { + if a == obj.SRCID { + continue Outer + } + } + + args = append(args, obj.SRCID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`docs`), + qm.WhereIn(`docs.src_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load docs") + } + + var resultSlice []*Doc + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice docs") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on docs") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred 
during iteration of eager loaded relations for docs") + } + + if len(docAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.SRCDocs = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &docR{} + } + foreign.R.SRC = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.SRCID == foreign.SRCID { + local.R.SRCDocs = append(local.R.SRCDocs, foreign) + if foreign.R == nil { + foreign.R = &docR{} + } + foreign.R.SRC = local + break + } + } + } + + return nil +} + +// AddSRCActivities adds the given related objects to the existing relationships +// of the source, optionally inserting them as new records. +// Appends related to o.R.SRCActivities. +// Sets related.R.SRC appropriately. +func (o *Source) AddSRCActivities(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Activity) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.SRCID, o.SRCID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"activities\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"src_id"}), + strmangle.WhereClause("\"", "\"", 0, activityPrimaryKeyColumns), + ) + values := []interface{}{o.SRCID, rel.ActivityID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.SRCID, o.SRCID) + } + } + + if o.R == nil { + o.R = &sourceR{ + SRCActivities: related, + } + } else { + o.R.SRCActivities = append(o.R.SRCActivities, related...) 
+ } + + for _, rel := range related { + if rel.R == nil { + rel.R = &activityR{ + SRC: o, + } + } else { + rel.R.SRC = o + } + } + return nil +} + +// SetSRCActivities removes all previously related items of the +// source replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.SRC's SRCActivities accordingly. +// Replaces o.R.SRCActivities with related. +// Sets related.R.SRC's SRCActivities accordingly. +func (o *Source) SetSRCActivities(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Activity) error { + query := "update \"activities\" set \"src_id\" = null where \"src_id\" = ?" + values := []interface{}{o.SRCID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.SRCActivities { + queries.SetScanner(&rel.SRCID, nil) + if rel.R == nil { + continue + } + + rel.R.SRC = nil + } + o.R.SRCActivities = nil + } + + return o.AddSRCActivities(ctx, exec, insert, related...) +} + +// RemoveSRCActivities relationships from objects passed in. +// Removes related items from R.SRCActivities (uses pointer comparison, removal does not keep order) +// Sets related.R.SRC. 
+func (o *Source) RemoveSRCActivities(ctx context.Context, exec boil.ContextExecutor, related ...*Activity) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.SRCID, nil) + if rel.R != nil { + rel.R.SRC = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("src_id")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.SRCActivities { + if rel != ri { + continue + } + + ln := len(o.R.SRCActivities) + if ln > 1 && i < ln-1 { + o.R.SRCActivities[i] = o.R.SRCActivities[ln-1] + } + o.R.SRCActivities = o.R.SRCActivities[:ln-1] + break + } + } + + return nil +} + +// AddSRCAssays adds the given related objects to the existing relationships +// of the source, optionally inserting them as new records. +// Appends related to o.R.SRCAssays. +// Sets related.R.SRC appropriately. +func (o *Source) AddSRCAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error { + var err error + for _, rel := range related { + if insert { + rel.SRCID = o.SRCID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"src_id"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{o.SRCID, rel.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.SRCID = o.SRCID + } + } + + if o.R == nil { + o.R = &sourceR{ + SRCAssays: related, + } + } else { + o.R.SRCAssays = append(o.R.SRCAssays, related...) 
+ } + + for _, rel := range related { + if rel.R == nil { + rel.R = &assayR{ + SRC: o, + } + } else { + rel.R.SRC = o + } + } + return nil +} + +// AddSRCCompoundRecords adds the given related objects to the existing relationships +// of the source, optionally inserting them as new records. +// Appends related to o.R.SRCCompoundRecords. +// Sets related.R.SRC appropriately. +func (o *Source) AddSRCCompoundRecords(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*CompoundRecord) error { + var err error + for _, rel := range related { + if insert { + rel.SRCID = o.SRCID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"compound_records\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"src_id"}), + strmangle.WhereClause("\"", "\"", 0, compoundRecordPrimaryKeyColumns), + ) + values := []interface{}{o.SRCID, rel.RecordID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.SRCID = o.SRCID + } + } + + if o.R == nil { + o.R = &sourceR{ + SRCCompoundRecords: related, + } + } else { + o.R.SRCCompoundRecords = append(o.R.SRCCompoundRecords, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &compoundRecordR{ + SRC: o, + } + } else { + rel.R.SRC = o + } + } + return nil +} + +// AddSRCDocs adds the given related objects to the existing relationships +// of the source, optionally inserting them as new records. +// Appends related to o.R.SRCDocs. +// Sets related.R.SRC appropriately. 
+func (o *Source) AddSRCDocs(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Doc) error { + var err error + for _, rel := range related { + if insert { + rel.SRCID = o.SRCID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"docs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"src_id"}), + strmangle.WhereClause("\"", "\"", 0, docPrimaryKeyColumns), + ) + values := []interface{}{o.SRCID, rel.DocID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.SRCID = o.SRCID + } + } + + if o.R == nil { + o.R = &sourceR{ + SRCDocs: related, + } + } else { + o.R.SRCDocs = append(o.R.SRCDocs, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &docR{ + SRC: o, + } + } else { + rel.R.SRC = o + } + } + return nil +} + +// Sources retrieves all the records using an executor. +func Sources(mods ...qm.QueryMod) sourceQuery { + mods = append(mods, qm.From("\"source\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"source\".*"}) + } + + return sourceQuery{q} +} + +// FindSource retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindSource(ctx context.Context, exec boil.ContextExecutor, sRCID int64, selectCols ...string) (*Source, error) { + sourceObj := &Source{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"source\" where \"src_id\"=?", sel, + ) + + q := queries.Raw(query, sRCID) + + err := q.Bind(ctx, exec, sourceObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from source") + } + + if err = sourceObj.doAfterSelectHooks(ctx, exec); err != nil { + return sourceObj, err + } + + return sourceObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *Source) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no source provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(sourceColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + sourceInsertCacheMut.RLock() + cache, cached := sourceInsertCache[key] + sourceInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + sourceAllColumns, + sourceColumnsWithDefault, + sourceColumnsWithoutDefault, + nzDefaults, + ) + wl = strmangle.SetComplement(wl, sourceGeneratedColumns) + + cache.valueMapping, err = queries.BindMapping(sourceType, sourceMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(sourceType, sourceMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"source\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), 
strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"source\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into source") + } + + if !cached { + sourceInsertCacheMut.Lock() + sourceInsertCache[key] = cache + sourceInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the Source. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
func (o *Source) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	// Run user-registered before-update hooks first; a hook error aborts the update.
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	// The UPDATE statement and its value mapping are cached per column-list key,
	// so repeated updates with the same column set skip query building.
	key := makeCacheKey(columns, nil)
	sourceUpdateCacheMut.RLock()
	cache, cached := sourceUpdateCache[key]
	sourceUpdateCacheMut.RUnlock()

	if !cached {
		// Infer the updatable column whitelist; primary-key and generated
		// columns are always excluded.
		wl := columns.UpdateColumnSet(
			sourceAllColumns,
			sourcePrimaryKeyColumns,
		)
		wl = strmangle.SetComplement(wl, sourceGeneratedColumns)

		// Unless the caller explicitly whitelisted it, never touch created_at.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update source, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"source\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, sourcePrimaryKeyColumns),
		)
		// The mapping covers the SET columns followed by the WHERE (primary key) columns.
		cache.valueMapping, err = queries.BindMapping(sourceType, sourceMapping, append(wl, sourcePrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	// Extract the SET and WHERE values from the struct in mapping order.
	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update source row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for source")
	}

	// Cache the built query only after a successful execution.
	if !cached {
		sourceUpdateCacheMut.Lock()
		sourceUpdateCache[key] = cache
		sourceUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
+func (q sourceQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for source") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for source") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o SourceSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), sourcePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"source\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, sourcePrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in source slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all source") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Source) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no source provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(sourceColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + sourceUpsertCacheMut.RLock() + cache, cached := sourceUpsertCache[key] + sourceUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + sourceAllColumns, + sourceColumnsWithDefault, + sourceColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + sourceAllColumns, + sourcePrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: 
unable to upsert source, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(sourcePrimaryKeyColumns)) + copy(conflict, sourcePrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"source\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(sourceType, sourceMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(sourceType, sourceMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert source") + } + + if !cached { + sourceUpsertCacheMut.Lock() + sourceUpsertCache[key] = cache + sourceUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Source record with an executor. +// Delete will match against the primary key column to find the record to delete. 
func (o *Source) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no Source provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	// Bind the primary key value(s) and issue a single-row DELETE.
	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), sourcePrimaryKeyMapping)
	sql := "DELETE FROM \"source\" WHERE \"src_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from source")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for source")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q sourceQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no sourceQuery provided for delete all")
	}

	// Rewrite the stored SELECT query into a DELETE before executing.
	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from source")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for source")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o SourceSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Per-object hooks run only when at least one hook is registered.
	if len(sourceBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// Collect every row's primary key so the whole slice is removed in one
	// batched DELETE statement.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), sourcePrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"source\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, sourcePrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from source slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for source")
	}

	if len(sourceAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *Source) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindSource(ctx, exec, o.SRCID)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place so existing pointers observe the fresh row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
+func (o *SourceSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := SourceSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), sourcePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"source\".* FROM \"source\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, sourcePrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in SourceSlice") + } + + *o = slice + + return nil +} + +// SourceExists checks if the Source row exists. +func SourceExists(ctx context.Context, exec boil.ContextExecutor, sRCID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"source\" where \"src_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, sRCID) + } + row := exec.QueryRowContext(ctx, sql, sRCID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if source exists") + } + + return exists, nil +} diff --git a/models/sqlite_upsert.go b/models/sqlite_upsert.go new file mode 100644 index 0000000..b6b65e6 --- /dev/null +++ b/models/sqlite_upsert.go @@ -0,0 +1,61 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "fmt" + "strings" + + "github.com/volatiletech/sqlboiler/v4/drivers" + "github.com/volatiletech/strmangle" +) + +// buildUpsertQuerySQLite builds a SQL statement string using the upsertData provided. 
func buildUpsertQuerySQLite(dia drivers.Dialect, tableName string, updateOnConflict bool, ret, update, conflict, whitelist []string) string {
	// Quote the identifier lists with the dialect's quote runes.
	// NOTE: `update` columns are deliberately quoted later, element by element.
	conflict = strmangle.IdentQuoteSlice(dia.LQ, dia.RQ, conflict)
	whitelist = strmangle.IdentQuoteSlice(dia.LQ, dia.RQ, whitelist)
	ret = strmangle.IdentQuoteSlice(dia.LQ, dia.RQ, ret)

	buf := strmangle.GetBuffer()
	defer strmangle.PutBuffer(buf)

	// With an empty whitelist there is nothing to insert explicitly, so fall
	// back to SQLite's "DEFAULT VALUES" insert form.
	columns := "DEFAULT VALUES"
	if len(whitelist) != 0 {
		columns = fmt.Sprintf("(%s) VALUES (%s)",
			strings.Join(whitelist, ", "),
			strmangle.Placeholders(dia.UseIndexPlaceholders, len(whitelist), 1, 1))
	}

	fmt.Fprintf(
		buf,
		"INSERT INTO %s %s ON CONFLICT ",
		tableName,
		columns,
	)

	// Ignore-on-conflict when no update was requested; otherwise emit an
	// upsert clause that overwrites the listed columns from EXCLUDED.
	if !updateOnConflict || len(update) == 0 {
		buf.WriteString("DO NOTHING")
	} else {
		buf.WriteByte('(')
		buf.WriteString(strings.Join(conflict, ", "))
		buf.WriteString(") DO UPDATE SET ")

		for i, v := range update {
			if i != 0 {
				buf.WriteByte(',')
			}
			quoted := strmangle.IdentQuote(dia.LQ, dia.RQ, v)
			buf.WriteString(quoted)
			buf.WriteString(" = EXCLUDED.")
			buf.WriteString(quoted)
		}
	}

	if len(ret) != 0 {
		buf.WriteString(" RETURNING ")
		buf.WriteString(strings.Join(ret, ", "))
	}

	return buf.String()
}
diff --git a/models/structural_alert_sets.go b/models/structural_alert_sets.go
new file mode 100644
index 0000000..db6dd92
--- /dev/null
+++ b/models/structural_alert_sets.go
@@ -0,0 +1,1072 @@
+// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
+// This file is meant to be re-generated in place and/or deleted at any time.
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// StructuralAlertSet is an object representing the database table. +type StructuralAlertSet struct { + AlertSetID int64 `boil:"alert_set_id" json:"alert_set_id" toml:"alert_set_id" yaml:"alert_set_id"` + SetName string `boil:"set_name" json:"set_name" toml:"set_name" yaml:"set_name"` + Priority int16 `boil:"priority" json:"priority" toml:"priority" yaml:"priority"` + + R *structuralAlertSetR `boil:"-" json:"-" toml:"-" yaml:"-"` + L structuralAlertSetL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var StructuralAlertSetColumns = struct { + AlertSetID string + SetName string + Priority string +}{ + AlertSetID: "alert_set_id", + SetName: "set_name", + Priority: "priority", +} + +var StructuralAlertSetTableColumns = struct { + AlertSetID string + SetName string + Priority string +}{ + AlertSetID: "structural_alert_sets.alert_set_id", + SetName: "structural_alert_sets.set_name", + Priority: "structural_alert_sets.priority", +} + +// Generated where + +var StructuralAlertSetWhere = struct { + AlertSetID whereHelperint64 + SetName whereHelperstring + Priority whereHelperint16 +}{ + AlertSetID: whereHelperint64{field: "\"structural_alert_sets\".\"alert_set_id\""}, + SetName: whereHelperstring{field: "\"structural_alert_sets\".\"set_name\""}, + Priority: whereHelperint16{field: "\"structural_alert_sets\".\"priority\""}, +} + +// StructuralAlertSetRels is where relationship names are stored. 
+var StructuralAlertSetRels = struct { + AlertSetStructuralAlerts string +}{ + AlertSetStructuralAlerts: "AlertSetStructuralAlerts", +} + +// structuralAlertSetR is where relationships are stored. +type structuralAlertSetR struct { + AlertSetStructuralAlerts StructuralAlertSlice `boil:"AlertSetStructuralAlerts" json:"AlertSetStructuralAlerts" toml:"AlertSetStructuralAlerts" yaml:"AlertSetStructuralAlerts"` +} + +// NewStruct creates a new relationship struct +func (*structuralAlertSetR) NewStruct() *structuralAlertSetR { + return &structuralAlertSetR{} +} + +func (r *structuralAlertSetR) GetAlertSetStructuralAlerts() StructuralAlertSlice { + if r == nil { + return nil + } + return r.AlertSetStructuralAlerts +} + +// structuralAlertSetL is where Load methods for each relationship are stored. +type structuralAlertSetL struct{} + +var ( + structuralAlertSetAllColumns = []string{"alert_set_id", "set_name", "priority"} + structuralAlertSetColumnsWithoutDefault = []string{"alert_set_id", "set_name", "priority"} + structuralAlertSetColumnsWithDefault = []string{} + structuralAlertSetPrimaryKeyColumns = []string{"alert_set_id"} + structuralAlertSetGeneratedColumns = []string{} +) + +type ( + // StructuralAlertSetSlice is an alias for a slice of pointers to StructuralAlertSet. + // This should almost always be used instead of []StructuralAlertSet. 
+ StructuralAlertSetSlice []*StructuralAlertSet + // StructuralAlertSetHook is the signature for custom StructuralAlertSet hook methods + StructuralAlertSetHook func(context.Context, boil.ContextExecutor, *StructuralAlertSet) error + + structuralAlertSetQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + structuralAlertSetType = reflect.TypeOf(&StructuralAlertSet{}) + structuralAlertSetMapping = queries.MakeStructMapping(structuralAlertSetType) + structuralAlertSetPrimaryKeyMapping, _ = queries.BindMapping(structuralAlertSetType, structuralAlertSetMapping, structuralAlertSetPrimaryKeyColumns) + structuralAlertSetInsertCacheMut sync.RWMutex + structuralAlertSetInsertCache = make(map[string]insertCache) + structuralAlertSetUpdateCacheMut sync.RWMutex + structuralAlertSetUpdateCache = make(map[string]updateCache) + structuralAlertSetUpsertCacheMut sync.RWMutex + structuralAlertSetUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var structuralAlertSetAfterSelectHooks []StructuralAlertSetHook + +var structuralAlertSetBeforeInsertHooks []StructuralAlertSetHook +var structuralAlertSetAfterInsertHooks []StructuralAlertSetHook + +var structuralAlertSetBeforeUpdateHooks []StructuralAlertSetHook +var structuralAlertSetAfterUpdateHooks []StructuralAlertSetHook + +var structuralAlertSetBeforeDeleteHooks []StructuralAlertSetHook +var structuralAlertSetAfterDeleteHooks []StructuralAlertSetHook + +var structuralAlertSetBeforeUpsertHooks []StructuralAlertSetHook +var structuralAlertSetAfterUpsertHooks []StructuralAlertSetHook + +// doAfterSelectHooks executes all "after Select" hooks. 
func (o *StructuralAlertSet) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be disabled per-call via the context (boil.HooksAreSkipped).
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	// Run hooks in registration order; the first error aborts the chain.
	for _, hook := range structuralAlertSetAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *StructuralAlertSet) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertSetBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *StructuralAlertSet) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertSetAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *StructuralAlertSet) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertSetBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *StructuralAlertSet) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertSetAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *StructuralAlertSet) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// Hooks can be disabled per-call via the context (boil.HooksAreSkipped).
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	// Run hooks in registration order; the first error aborts the chain.
	for _, hook := range structuralAlertSetBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *StructuralAlertSet) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertSetAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *StructuralAlertSet) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertSetBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *StructuralAlertSet) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertSetAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddStructuralAlertSetHook registers your hook function for all future operations.
+func AddStructuralAlertSetHook(hookPoint boil.HookPoint, structuralAlertSetHook StructuralAlertSetHook) { + switch hookPoint { + case boil.AfterSelectHook: + structuralAlertSetAfterSelectHooks = append(structuralAlertSetAfterSelectHooks, structuralAlertSetHook) + case boil.BeforeInsertHook: + structuralAlertSetBeforeInsertHooks = append(structuralAlertSetBeforeInsertHooks, structuralAlertSetHook) + case boil.AfterInsertHook: + structuralAlertSetAfterInsertHooks = append(structuralAlertSetAfterInsertHooks, structuralAlertSetHook) + case boil.BeforeUpdateHook: + structuralAlertSetBeforeUpdateHooks = append(structuralAlertSetBeforeUpdateHooks, structuralAlertSetHook) + case boil.AfterUpdateHook: + structuralAlertSetAfterUpdateHooks = append(structuralAlertSetAfterUpdateHooks, structuralAlertSetHook) + case boil.BeforeDeleteHook: + structuralAlertSetBeforeDeleteHooks = append(structuralAlertSetBeforeDeleteHooks, structuralAlertSetHook) + case boil.AfterDeleteHook: + structuralAlertSetAfterDeleteHooks = append(structuralAlertSetAfterDeleteHooks, structuralAlertSetHook) + case boil.BeforeUpsertHook: + structuralAlertSetBeforeUpsertHooks = append(structuralAlertSetBeforeUpsertHooks, structuralAlertSetHook) + case boil.AfterUpsertHook: + structuralAlertSetAfterUpsertHooks = append(structuralAlertSetAfterUpsertHooks, structuralAlertSetHook) + } +} + +// One returns a single structuralAlertSet record from the query. +func (q structuralAlertSetQuery) One(ctx context.Context, exec boil.ContextExecutor) (*StructuralAlertSet, error) { + o := &StructuralAlertSet{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for structural_alert_sets") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all StructuralAlertSet records from the query. 
+func (q structuralAlertSetQuery) All(ctx context.Context, exec boil.ContextExecutor) (StructuralAlertSetSlice, error) { + var o []*StructuralAlertSet + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to StructuralAlertSet slice") + } + + if len(structuralAlertSetAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all StructuralAlertSet records in the query. +func (q structuralAlertSetQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count structural_alert_sets rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q structuralAlertSetQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if structural_alert_sets exists") + } + + return count > 0, nil +} + +// AlertSetStructuralAlerts retrieves all the structural_alert's StructuralAlerts with an executor via alert_set_id column. +func (o *StructuralAlertSet) AlertSetStructuralAlerts(mods ...qm.QueryMod) structuralAlertQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"structural_alerts\".\"alert_set_id\"=?", o.AlertSetID), + ) + + return StructuralAlerts(queryMods...) 
+} + +// LoadAlertSetStructuralAlerts allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (structuralAlertSetL) LoadAlertSetStructuralAlerts(ctx context.Context, e boil.ContextExecutor, singular bool, maybeStructuralAlertSet interface{}, mods queries.Applicator) error { + var slice []*StructuralAlertSet + var object *StructuralAlertSet + + if singular { + object = maybeStructuralAlertSet.(*StructuralAlertSet) + } else { + slice = *maybeStructuralAlertSet.(*[]*StructuralAlertSet) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &structuralAlertSetR{} + } + args = append(args, object.AlertSetID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &structuralAlertSetR{} + } + + for _, a := range args { + if a == obj.AlertSetID { + continue Outer + } + } + + args = append(args, obj.AlertSetID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`structural_alerts`), + qm.WhereIn(`structural_alerts.alert_set_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load structural_alerts") + } + + var resultSlice []*StructuralAlert + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice structural_alerts") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on structural_alerts") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for structural_alerts") + } + + if len(structuralAlertAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.AlertSetStructuralAlerts = 
resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &structuralAlertR{} + } + foreign.R.AlertSet = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.AlertSetID == foreign.AlertSetID { + local.R.AlertSetStructuralAlerts = append(local.R.AlertSetStructuralAlerts, foreign) + if foreign.R == nil { + foreign.R = &structuralAlertR{} + } + foreign.R.AlertSet = local + break + } + } + } + + return nil +} + +// AddAlertSetStructuralAlerts adds the given related objects to the existing relationships +// of the structural_alert_set, optionally inserting them as new records. +// Appends related to o.R.AlertSetStructuralAlerts. +// Sets related.R.AlertSet appropriately. +func (o *StructuralAlertSet) AddAlertSetStructuralAlerts(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*StructuralAlert) error { + var err error + for _, rel := range related { + if insert { + rel.AlertSetID = o.AlertSetID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"structural_alerts\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"alert_set_id"}), + strmangle.WhereClause("\"", "\"", 0, structuralAlertPrimaryKeyColumns), + ) + values := []interface{}{o.AlertSetID, rel.AlertID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.AlertSetID = o.AlertSetID + } + } + + if o.R == nil { + o.R = &structuralAlertSetR{ + AlertSetStructuralAlerts: related, + } + } else { + o.R.AlertSetStructuralAlerts = append(o.R.AlertSetStructuralAlerts, related...) 
+ } + + for _, rel := range related { + if rel.R == nil { + rel.R = &structuralAlertR{ + AlertSet: o, + } + } else { + rel.R.AlertSet = o + } + } + return nil +} + +// StructuralAlertSets retrieves all the records using an executor. +func StructuralAlertSets(mods ...qm.QueryMod) structuralAlertSetQuery { + mods = append(mods, qm.From("\"structural_alert_sets\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"structural_alert_sets\".*"}) + } + + return structuralAlertSetQuery{q} +} + +// FindStructuralAlertSet retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindStructuralAlertSet(ctx context.Context, exec boil.ContextExecutor, alertSetID int64, selectCols ...string) (*StructuralAlertSet, error) { + structuralAlertSetObj := &StructuralAlertSet{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"structural_alert_sets\" where \"alert_set_id\"=?", sel, + ) + + q := queries.Raw(query, alertSetID) + + err := q.Bind(ctx, exec, structuralAlertSetObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from structural_alert_sets") + } + + if err = structuralAlertSetObj.doAfterSelectHooks(ctx, exec); err != nil { + return structuralAlertSetObj, err + } + + return structuralAlertSetObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
func (o *StructuralAlertSet) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no structural_alert_sets provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(structuralAlertSetColumnsWithDefault, o)

	// The generated SQL and struct-field mappings are cached per column-set
	// key; reads take the RLock, writes (below) take the full lock. A rare
	// duplicate build under concurrency is benign — last writer wins.
	key := makeCacheKey(columns, nzDefaults)
	structuralAlertSetInsertCacheMut.RLock()
	cache, cached := structuralAlertSetInsertCache[key]
	structuralAlertSetInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			structuralAlertSetAllColumns,
			structuralAlertSetColumnsWithDefault,
			structuralAlertSetColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(structuralAlertSetType, structuralAlertSetMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(structuralAlertSetType, structuralAlertSetMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"structural_alert_sets\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"structural_alert_sets\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		// Splice the (possibly empty) OUTPUT/RETURNING fragments into the
		// %%s slots left by the first Sprintf above.
		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	if len(cache.retMapping) != 0 {
		// RETURNING path: scan database-generated values back into o.
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into structural_alert_sets")
	}

	if !cached {
		structuralAlertSetInsertCacheMut.Lock()
		structuralAlertSetInsertCache[key] = cache
		structuralAlertSetInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the StructuralAlertSet.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *StructuralAlertSet) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	structuralAlertSetUpdateCacheMut.RLock()
	cache, cached := structuralAlertSetUpdateCache[key]
	structuralAlertSetUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			structuralAlertSetAllColumns,
			structuralAlertSetPrimaryKeyColumns,
		)

		// created_at is never updated implicitly; only an explicit whitelist
		// may touch it.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update structural_alert_sets, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"structural_alert_sets\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, structuralAlertSetPrimaryKeyColumns),
		)
		cache.valueMapping, err = queries.BindMapping(structuralAlertSetType, structuralAlertSetMapping, append(wl, structuralAlertSetPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update structural_alert_sets row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for structural_alert_sets")
	}

	if !cached {
		structuralAlertSetUpdateCacheMut.Lock()
		structuralAlertSetUpdateCache[key] = cache
		structuralAlertSetUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q structuralAlertSetQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for structural_alert_sets")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for structural_alert_sets")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
// NOTE(review): this bulk path deliberately skips before/after-update hooks.
func (o StructuralAlertSetSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	// Map iteration order is random; colNames and args stay index-aligned,
	// which is all the generated SQL requires.
	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), structuralAlertSetPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"structural_alert_sets\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, structuralAlertSetPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in structuralAlertSet slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all structuralAlertSet")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *StructuralAlertSet) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no structural_alert_sets provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(structuralAlertSetColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	structuralAlertSetUpsertCacheMut.RLock()
	cache, cached := structuralAlertSetUpsertCache[key]
	structuralAlertSetUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			structuralAlertSetAllColumns,
			structuralAlertSetColumnsWithDefault,
			structuralAlertSetColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			structuralAlertSetAllColumns,
			structuralAlertSetPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert structural_alert_sets, could not build update column list")
		}

		// Default the conflict target to the primary key when none was given.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(structuralAlertSetPrimaryKeyColumns))
			copy(conflict, structuralAlertSetPrimaryKeyColumns)
		}
		// NOTE(review): buildUpsertQuerySQLite confirms this file was
		// generated for the SQLite dialect.
		cache.query = buildUpsertQuerySQLite(dialect, "\"structural_alert_sets\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(structuralAlertSetType, structuralAlertSetMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(structuralAlertSetType, structuralAlertSetMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert structural_alert_sets")
	}

	if !cached {
		structuralAlertSetUpsertCacheMut.Lock()
		structuralAlertSetUpsertCache[key] = cache
		structuralAlertSetUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single StructuralAlertSet record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *StructuralAlertSet) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no StructuralAlertSet provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), structuralAlertSetPrimaryKeyMapping)
	sql := "DELETE FROM \"structural_alert_sets\" WHERE \"alert_set_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from structural_alert_sets")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for structural_alert_sets")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q structuralAlertSetQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no structuralAlertSetQuery provided for delete all")
	}

	// Rewrite the built query into a DELETE with the same WHERE clause.
	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from structural_alert_sets")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for structural_alert_sets")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o StructuralAlertSetSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Unlike the query-based DeleteAll above, the slice variant does run
	// per-row delete hooks.
	if len(structuralAlertSetBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), structuralAlertSetPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// One statement deletes every row in the slice by repeating the PK clause.
	sql := "DELETE FROM \"structural_alert_sets\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, structuralAlertSetPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from structuralAlertSet slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for structural_alert_sets")
	}

	if len(structuralAlertSetAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *StructuralAlertSet) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindStructuralAlertSet(ctx, exec, o.AlertSetID)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place; note this also replaces o.R/o.L.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *StructuralAlertSetSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := StructuralAlertSetSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), structuralAlertSetPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"structural_alert_sets\".* FROM \"structural_alert_sets\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, structuralAlertSetPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in StructuralAlertSetSlice")
	}

	// Replace the whole slice; row order follows the database result, not the
	// original slice order.
	*o = slice

	return nil
}

// StructuralAlertSetExists checks if the StructuralAlertSet row exists.
+func StructuralAlertSetExists(ctx context.Context, exec boil.ContextExecutor, alertSetID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"structural_alert_sets\" where \"alert_set_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, alertSetID) + } + row := exec.QueryRowContext(ctx, sql, alertSetID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if structural_alert_sets exists") + } + + return exists, nil +} diff --git a/models/structural_alerts.go b/models/structural_alerts.go new file mode 100644 index 0000000..a02fb10 --- /dev/null +++ b/models/structural_alerts.go @@ -0,0 +1,1251 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// StructuralAlert is an object representing the database table. 
type StructuralAlert struct {
	AlertID    int64  `boil:"alert_id" json:"alert_id" toml:"alert_id" yaml:"alert_id"`
	AlertSetID int64  `boil:"alert_set_id" json:"alert_set_id" toml:"alert_set_id" yaml:"alert_set_id"`
	AlertName  string `boil:"alert_name" json:"alert_name" toml:"alert_name" yaml:"alert_name"`
	Smarts     string `boil:"smarts" json:"smarts" toml:"smarts" yaml:"smarts"`

	// R caches loaded relationships; L carries the relationship Load methods.
	// Both are excluded from all serialization formats via the "-" tags.
	R *structuralAlertR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L structuralAlertL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// StructuralAlertColumns holds the bare column names for query building.
var StructuralAlertColumns = struct {
	AlertID    string
	AlertSetID string
	AlertName  string
	Smarts     string
}{
	AlertID:    "alert_id",
	AlertSetID: "alert_set_id",
	AlertName:  "alert_name",
	Smarts:     "smarts",
}

// StructuralAlertTableColumns holds the table-qualified column names.
var StructuralAlertTableColumns = struct {
	AlertID    string
	AlertSetID string
	AlertName  string
	Smarts     string
}{
	AlertID:    "structural_alerts.alert_id",
	AlertSetID: "structural_alerts.alert_set_id",
	AlertName:  "structural_alerts.alert_name",
	Smarts:     "structural_alerts.smarts",
}

// Generated where

var StructuralAlertWhere = struct {
	AlertID    whereHelperint64
	AlertSetID whereHelperint64
	AlertName  whereHelperstring
	Smarts     whereHelperstring
}{
	AlertID:    whereHelperint64{field: "\"structural_alerts\".\"alert_id\""},
	AlertSetID: whereHelperint64{field: "\"structural_alerts\".\"alert_set_id\""},
	AlertName:  whereHelperstring{field: "\"structural_alerts\".\"alert_name\""},
	Smarts:     whereHelperstring{field: "\"structural_alerts\".\"smarts\""},
}

// StructuralAlertRels is where relationship names are stored.
var StructuralAlertRels = struct {
	AlertSet                      string
	AlertCompoundStructuralAlerts string
}{
	AlertSet:                      "AlertSet",
	AlertCompoundStructuralAlerts: "AlertCompoundStructuralAlerts",
}

// structuralAlertR is where relationships are stored.
type structuralAlertR struct {
	AlertSet                      *StructuralAlertSet          `boil:"AlertSet" json:"AlertSet" toml:"AlertSet" yaml:"AlertSet"`
	AlertCompoundStructuralAlerts CompoundStructuralAlertSlice `boil:"AlertCompoundStructuralAlerts" json:"AlertCompoundStructuralAlerts" toml:"AlertCompoundStructuralAlerts" yaml:"AlertCompoundStructuralAlerts"`
}

// NewStruct creates a new relationship struct
func (*structuralAlertR) NewStruct() *structuralAlertR {
	return &structuralAlertR{}
}

// GetAlertSet is a nil-safe accessor for the cached AlertSet relationship.
func (r *structuralAlertR) GetAlertSet() *StructuralAlertSet {
	if r == nil {
		return nil
	}
	return r.AlertSet
}

// GetAlertCompoundStructuralAlerts is a nil-safe accessor for the cached slice.
func (r *structuralAlertR) GetAlertCompoundStructuralAlerts() CompoundStructuralAlertSlice {
	if r == nil {
		return nil
	}
	return r.AlertCompoundStructuralAlerts
}

// structuralAlertL is where Load methods for each relationship are stored.
type structuralAlertL struct{}

var (
	structuralAlertAllColumns            = []string{"alert_id", "alert_set_id", "alert_name", "smarts"}
	structuralAlertColumnsWithoutDefault = []string{"alert_id", "alert_set_id", "alert_name", "smarts"}
	structuralAlertColumnsWithDefault    = []string{}
	structuralAlertPrimaryKeyColumns     = []string{"alert_id"}
	structuralAlertGeneratedColumns      = []string{}
)

type (
	// StructuralAlertSlice is an alias for a slice of pointers to StructuralAlert.
	// This should almost always be used instead of []StructuralAlert.
	StructuralAlertSlice []*StructuralAlert
	// StructuralAlertHook is the signature for custom StructuralAlert hook methods
	StructuralAlertHook func(context.Context, boil.ContextExecutor, *StructuralAlert) error

	structuralAlertQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	structuralAlertType                 = reflect.TypeOf(&StructuralAlert{})
	structuralAlertMapping              = queries.MakeStructMapping(structuralAlertType)
	structuralAlertPrimaryKeyMapping, _ = queries.BindMapping(structuralAlertType, structuralAlertMapping, structuralAlertPrimaryKeyColumns)
	structuralAlertInsertCacheMut       sync.RWMutex
	structuralAlertInsertCache          = make(map[string]insertCache)
	structuralAlertUpdateCacheMut       sync.RWMutex
	structuralAlertUpdateCache          = make(map[string]updateCache)
	structuralAlertUpsertCacheMut       sync.RWMutex
	structuralAlertUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Package-global hook registries; populated via AddStructuralAlertHook and
// read (without locking) by the do*Hooks runners below.
var structuralAlertAfterSelectHooks []StructuralAlertHook

var structuralAlertBeforeInsertHooks []StructuralAlertHook
var structuralAlertAfterInsertHooks []StructuralAlertHook

var structuralAlertBeforeUpdateHooks []StructuralAlertHook
var structuralAlertAfterUpdateHooks []StructuralAlertHook

var structuralAlertBeforeDeleteHooks []StructuralAlertHook
var structuralAlertAfterDeleteHooks []StructuralAlertHook

var structuralAlertBeforeUpsertHooks []StructuralAlertHook
var structuralAlertAfterUpsertHooks []StructuralAlertHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *StructuralAlert) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// boil.SkipHooks(ctx) short-circuits every hook runner.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *StructuralAlert) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *StructuralAlert) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *StructuralAlert) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *StructuralAlert) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *StructuralAlert) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *StructuralAlert) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *StructuralAlert) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *StructuralAlert) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range structuralAlertAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddStructuralAlertHook registers your hook function for all future operations.
+func AddStructuralAlertHook(hookPoint boil.HookPoint, structuralAlertHook StructuralAlertHook) { + switch hookPoint { + case boil.AfterSelectHook: + structuralAlertAfterSelectHooks = append(structuralAlertAfterSelectHooks, structuralAlertHook) + case boil.BeforeInsertHook: + structuralAlertBeforeInsertHooks = append(structuralAlertBeforeInsertHooks, structuralAlertHook) + case boil.AfterInsertHook: + structuralAlertAfterInsertHooks = append(structuralAlertAfterInsertHooks, structuralAlertHook) + case boil.BeforeUpdateHook: + structuralAlertBeforeUpdateHooks = append(structuralAlertBeforeUpdateHooks, structuralAlertHook) + case boil.AfterUpdateHook: + structuralAlertAfterUpdateHooks = append(structuralAlertAfterUpdateHooks, structuralAlertHook) + case boil.BeforeDeleteHook: + structuralAlertBeforeDeleteHooks = append(structuralAlertBeforeDeleteHooks, structuralAlertHook) + case boil.AfterDeleteHook: + structuralAlertAfterDeleteHooks = append(structuralAlertAfterDeleteHooks, structuralAlertHook) + case boil.BeforeUpsertHook: + structuralAlertBeforeUpsertHooks = append(structuralAlertBeforeUpsertHooks, structuralAlertHook) + case boil.AfterUpsertHook: + structuralAlertAfterUpsertHooks = append(structuralAlertAfterUpsertHooks, structuralAlertHook) + } +} + +// One returns a single structuralAlert record from the query. +func (q structuralAlertQuery) One(ctx context.Context, exec boil.ContextExecutor) (*StructuralAlert, error) { + o := &StructuralAlert{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for structural_alerts") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all StructuralAlert records from the query. 
func (q structuralAlertQuery) All(ctx context.Context, exec boil.ContextExecutor) (StructuralAlertSlice, error) {
	var o []*StructuralAlert

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to StructuralAlert slice")
	}

	// Only walk the result set when at least one after-select hook is registered.
	if len(structuralAlertAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all StructuralAlert records in the query.
func (q structuralAlertQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Replace any existing select list with COUNT semantics.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count structural_alerts rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q structuralAlertQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	// LIMIT 1: existence only, no need to count every matching row.
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if structural_alerts exists")
	}

	return count > 0, nil
}

// AlertSet pointed to by the foreign key.
func (o *StructuralAlert) AlertSet(mods ...qm.QueryMod) structuralAlertSetQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"alert_set_id\" = ?", o.AlertSetID),
	}

	// Caller-supplied mods are applied after the foreign-key filter.
	queryMods = append(queryMods, mods...)

	return StructuralAlertSets(queryMods...)
}

// AlertCompoundStructuralAlerts retrieves all the compound_structural_alert's CompoundStructuralAlerts with an executor via alert_id column.
+func (o *StructuralAlert) AlertCompoundStructuralAlerts(mods ...qm.QueryMod) compoundStructuralAlertQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"compound_structural_alerts\".\"alert_id\"=?", o.AlertID), + ) + + return CompoundStructuralAlerts(queryMods...) +} + +// LoadAlertSet allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (structuralAlertL) LoadAlertSet(ctx context.Context, e boil.ContextExecutor, singular bool, maybeStructuralAlert interface{}, mods queries.Applicator) error { + var slice []*StructuralAlert + var object *StructuralAlert + + if singular { + object = maybeStructuralAlert.(*StructuralAlert) + } else { + slice = *maybeStructuralAlert.(*[]*StructuralAlert) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &structuralAlertR{} + } + args = append(args, object.AlertSetID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &structuralAlertR{} + } + + for _, a := range args { + if a == obj.AlertSetID { + continue Outer + } + } + + args = append(args, obj.AlertSetID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`structural_alert_sets`), + qm.WhereIn(`structural_alert_sets.alert_set_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load StructuralAlertSet") + } + + var resultSlice []*StructuralAlertSet + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice StructuralAlertSet") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for structural_alert_sets") + } + if err = results.Err(); err != nil { + return 
errors.Wrap(err, "error occurred during iteration of eager loaded relations for structural_alert_sets") + } + + if len(structuralAlertAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.AlertSet = foreign + if foreign.R == nil { + foreign.R = &structuralAlertSetR{} + } + foreign.R.AlertSetStructuralAlerts = append(foreign.R.AlertSetStructuralAlerts, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.AlertSetID == foreign.AlertSetID { + local.R.AlertSet = foreign + if foreign.R == nil { + foreign.R = &structuralAlertSetR{} + } + foreign.R.AlertSetStructuralAlerts = append(foreign.R.AlertSetStructuralAlerts, local) + break + } + } + } + + return nil +} + +// LoadAlertCompoundStructuralAlerts allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
func (structuralAlertL) LoadAlertCompoundStructuralAlerts(ctx context.Context, e boil.ContextExecutor, singular bool, maybeStructuralAlert interface{}, mods queries.Applicator) error {
	var slice []*StructuralAlert
	var object *StructuralAlert

	if singular {
		object = maybeStructuralAlert.(*StructuralAlert)
	} else {
		slice = *maybeStructuralAlert.(*[]*StructuralAlert)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &structuralAlertR{}
		}
		args = append(args, object.AlertID)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &structuralAlertR{}
			}

			// Deduplicate alert IDs so each is queried only once.
			for _, a := range args {
				if a == obj.AlertID {
					continue Outer
				}
			}

			args = append(args, obj.AlertID)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`compound_structural_alerts`),
		qm.WhereIn(`compound_structural_alerts.alert_id in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load compound_structural_alerts")
	}

	var resultSlice []*CompoundStructuralAlert
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice compound_structural_alerts")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on compound_structural_alerts")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for compound_structural_alerts")
	}

	// Guard on the foreign model's hook registry before iterating.
	if len(compoundStructuralAlertAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.AlertCompoundStructuralAlerts = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &compoundStructuralAlertR{}
			}
			foreign.R.Alert = object
		}
		return nil
	}

	// Attach each child row to its matching parent and set the back-reference.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.AlertID == foreign.AlertID {
				local.R.AlertCompoundStructuralAlerts = append(local.R.AlertCompoundStructuralAlerts, foreign)
				if foreign.R == nil {
					foreign.R = &compoundStructuralAlertR{}
				}
				foreign.R.Alert = local
				break
			}
		}
	}

	return nil
}

// SetAlertSet of the structuralAlert to the related item.
// Sets o.R.AlertSet to related.
// Adds o to related.R.AlertSetStructuralAlerts.
func (o *StructuralAlert) SetAlertSet(ctx context.Context, exec boil.ContextExecutor, insert bool, related *StructuralAlertSet) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"structural_alerts\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"alert_set_id"}),
		strmangle.WhereClause("\"", "\"", 0, structuralAlertPrimaryKeyColumns),
	)
	values := []interface{}{related.AlertSetID, o.AlertID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Keep the in-memory struct and both relationship caches in sync
	// with the row just written.
	o.AlertSetID = related.AlertSetID
	if o.R == nil {
		o.R = &structuralAlertR{
			AlertSet: related,
		}
	} else {
		o.R.AlertSet = related
	}

	if related.R == nil {
		related.R = &structuralAlertSetR{
			AlertSetStructuralAlerts: StructuralAlertSlice{o},
		}
	} else {
		related.R.AlertSetStructuralAlerts = append(related.R.AlertSetStructuralAlerts, o)
	}

	return nil
}

// AddAlertCompoundStructuralAlerts adds the given related objects to the existing relationships
// of the structural_alert, optionally inserting them as new records.
// Appends related to o.R.AlertCompoundStructuralAlerts.
// Sets related.R.Alert appropriately.
func (o *StructuralAlert) AddAlertCompoundStructuralAlerts(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*CompoundStructuralAlert) error {
	var err error
	for _, rel := range related {
		if insert {
			// New child rows get the foreign key before insertion.
			rel.AlertID = o.AlertID
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing rows are re-pointed at this alert via their primary key.
			updateQuery := fmt.Sprintf(
				"UPDATE \"compound_structural_alerts\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"alert_id"}),
				strmangle.WhereClause("\"", "\"", 0, compoundStructuralAlertPrimaryKeyColumns),
			)
			values := []interface{}{o.AlertID, rel.CPDSTRAlertID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.AlertID = o.AlertID
		}
	}

	if o.R == nil {
		o.R = &structuralAlertR{
			AlertCompoundStructuralAlerts: related,
		}
	} else {
		o.R.AlertCompoundStructuralAlerts = append(o.R.AlertCompoundStructuralAlerts, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &compoundStructuralAlertR{
				Alert: o,
			}
		} else {
			rel.R.Alert = o
		}
	}
	return nil
}

// StructuralAlerts retrieves all the records using an executor.
func StructuralAlerts(mods ...qm.QueryMod) structuralAlertQuery {
	mods = append(mods, qm.From("\"structural_alerts\""))
	q := NewQuery(mods...)
	// Default to selecting every column of this table unless the caller
	// provided an explicit select list.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"structural_alerts\".*"})
	}

	return structuralAlertQuery{q}
}

// FindStructuralAlert retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindStructuralAlert(ctx context.Context, exec boil.ContextExecutor, alertID int64, selectCols ...string) (*StructuralAlert, error) {
	structuralAlertObj := &StructuralAlert{}

	sel := "*"
	if len(selectCols) > 0 {
		// Quote each requested column with the dialect's identifier quotes.
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"structural_alerts\" where \"alert_id\"=?", sel,
	)

	q := queries.Raw(query, alertID)

	err := q.Bind(ctx, exec, structuralAlertObj)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			// Pass the sentinel through unwrapped for errors.Is checks.
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from structural_alerts")
	}

	if err = structuralAlertObj.doAfterSelectHooks(ctx, exec); err != nil {
		return structuralAlertObj, err
	}

	return structuralAlertObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *StructuralAlert) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no structural_alerts provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(structuralAlertColumnsWithDefault, o)

	// Statements are cached per (column set, non-zero defaults) key; the
	// RWMutex allows concurrent readers on the hot path.
	key := makeCacheKey(columns, nzDefaults)
	structuralAlertInsertCacheMut.RLock()
	cache, cached := structuralAlertInsertCache[key]
	structuralAlertInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			structuralAlertAllColumns,
			structuralAlertColumnsWithDefault,
			structuralAlertColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(structuralAlertType, structuralAlertMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(structuralAlertType, structuralAlertMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"structural_alerts\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"structural_alerts\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	// With a RETURNING clause, scan generated values back into the struct.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into structural_alerts")
	}

	if !cached {
		structuralAlertInsertCacheMut.Lock()
		structuralAlertInsertCache[key] = cache
		structuralAlertInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the StructuralAlert.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *StructuralAlert) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	key := makeCacheKey(columns, nil)
	structuralAlertUpdateCacheMut.RLock()
	cache, cached := structuralAlertUpdateCache[key]
	structuralAlertUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			structuralAlertAllColumns,
			structuralAlertPrimaryKeyColumns,
		)

		// created_at is never updated unless explicitly whitelisted.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update structural_alerts, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"structural_alerts\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, structuralAlertPrimaryKeyColumns),
		)
		// Primary key values are appended last to feed the WHERE clause.
		cache.valueMapping, err = queries.BindMapping(structuralAlertType, structuralAlertMapping, append(wl, structuralAlertPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update structural_alerts row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for structural_alerts")
	}

	if !cached {
		structuralAlertUpdateCacheMut.Lock()
		structuralAlertUpdateCache[key] = cache
		structuralAlertUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q structuralAlertQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for structural_alerts")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for structural_alerts")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o StructuralAlertSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), structuralAlertPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"structural_alerts\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, structuralAlertPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in structuralAlert slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all structuralAlert")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *StructuralAlert) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no structural_alerts provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(structuralAlertColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	structuralAlertUpsertCacheMut.RLock()
	cache, cached := structuralAlertUpsertCache[key]
	structuralAlertUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			structuralAlertAllColumns,
			structuralAlertColumnsWithDefault,
			structuralAlertColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			structuralAlertAllColumns,
			structuralAlertPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert structural_alerts, could not build update column list")
		}

		// Conflict target defaults to the primary key when not specified.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(structuralAlertPrimaryKeyColumns))
			copy(conflict, structuralAlertPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"structural_alerts\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(structuralAlertType, structuralAlertMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(structuralAlertType, structuralAlertMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert structural_alerts")
	}

	if !cached {
		structuralAlertUpsertCacheMut.Lock()
		structuralAlertUpsertCache[key] = cache
		structuralAlertUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single StructuralAlert record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *StructuralAlert) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no StructuralAlert provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	// Bind only the primary key value for the WHERE clause.
	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), structuralAlertPrimaryKeyMapping)
	sql := "DELETE FROM \"structural_alerts\" WHERE \"alert_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from structural_alerts")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for structural_alerts")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q structuralAlertQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no structuralAlertQuery provided for delete all")
	}

	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from structural_alerts")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for structural_alerts")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o StructuralAlertSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Hooks run per object even though the delete is a single statement.
	if len(structuralAlertBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), structuralAlertPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"structural_alerts\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, structuralAlertPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from structuralAlert slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for structural_alerts")
	}

	if len(structuralAlertAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *StructuralAlert) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindStructuralAlert(ctx, exec, o.AlertID)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place with the freshly loaded row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *StructuralAlertSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := StructuralAlertSlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), structuralAlertPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"structural_alerts\".* FROM \"structural_alerts\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, structuralAlertPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in StructuralAlertSlice")
	}

	// Replace the slice wholesale; database ordering may differ from input.
	*o = slice

	return nil
}

// StructuralAlertExists checks if the StructuralAlert row exists.
func StructuralAlertExists(ctx context.Context, exec boil.ContextExecutor, alertID int64) (bool, error) {
	var exists bool
	// EXISTS subquery with LIMIT 1 avoids scanning more than one row.
	sql := "select exists(select 1 from \"structural_alerts\" where \"alert_id\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, alertID)
	}
	row := exec.QueryRowContext(ctx, sql, alertID)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if structural_alerts exists")
	}

	return exists, nil
}
diff --git a/models/target_components.go b/models/target_components.go
new file mode 100644
index 0000000..8c23c50
--- /dev/null
+++ b/models/target_components.go
@@ -0,0 +1,1248 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// TargetComponent is an object representing the database table.
type TargetComponent struct {
	Tid         int64 `boil:"tid" json:"tid" toml:"tid" yaml:"tid"`
	ComponentID int64 `boil:"component_id" json:"component_id" toml:"component_id" yaml:"component_id"`
	TargcompID  int64 `boil:"targcomp_id" json:"targcomp_id" toml:"targcomp_id" yaml:"targcomp_id"`
	Homologue   int16 `boil:"homologue" json:"homologue" toml:"homologue" yaml:"homologue"`

	// R holds eager-loaded relationships; L holds the Load methods.
	R *targetComponentR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L targetComponentL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// TargetComponentColumns maps struct field names to bare column names.
var TargetComponentColumns = struct {
	Tid         string
	ComponentID string
	TargcompID  string
	Homologue   string
}{
	Tid:         "tid",
	ComponentID: "component_id",
	TargcompID:  "targcomp_id",
	Homologue:   "homologue",
}

// TargetComponentTableColumns maps struct field names to table-qualified column names.
var TargetComponentTableColumns = struct {
	Tid         string
	ComponentID string
	TargcompID  string
	Homologue   string
}{
	Tid:         "target_components.tid",
	ComponentID: "target_components.component_id",
	TargcompID:  "target_components.targcomp_id",
	Homologue:   "target_components.homologue",
}

// Generated where

var TargetComponentWhere = struct {
	Tid         whereHelperint64
	ComponentID whereHelperint64
	TargcompID  whereHelperint64
	Homologue   whereHelperint16
}{
	Tid:         whereHelperint64{field: "\"target_components\".\"tid\""},
	ComponentID: whereHelperint64{field: "\"target_components\".\"component_id\""},
	TargcompID:  whereHelperint64{field: "\"target_components\".\"targcomp_id\""},
	Homologue:   whereHelperint16{field: "\"target_components\".\"homologue\""},
}

// TargetComponentRels is where relationship names are stored.
var TargetComponentRels = struct {
	TidTargetDictionary string
	Component           string
}{
	TidTargetDictionary: "TidTargetDictionary",
	Component:           "Component",
}

// targetComponentR is where relationships are stored.
type targetComponentR struct {
	TidTargetDictionary *TargetDictionary  `boil:"TidTargetDictionary" json:"TidTargetDictionary" toml:"TidTargetDictionary" yaml:"TidTargetDictionary"`
	Component           *ComponentSequence `boil:"Component" json:"Component" toml:"Component" yaml:"Component"`
}

// NewStruct creates a new relationship struct
func (*targetComponentR) NewStruct() *targetComponentR {
	return &targetComponentR{}
}

// GetTidTargetDictionary returns the loaded relation, or nil when unloaded.
func (r *targetComponentR) GetTidTargetDictionary() *TargetDictionary {
	if r == nil {
		return nil
	}
	return r.TidTargetDictionary
}

// GetComponent returns the loaded relation, or nil when unloaded.
func (r *targetComponentR) GetComponent() *ComponentSequence {
	if r == nil {
		return nil
	}
	return r.Component
}

// targetComponentL is where Load methods for each relationship are stored.
type targetComponentL struct{}

var (
	targetComponentAllColumns            = []string{"tid", "component_id", "targcomp_id", "homologue"}
	targetComponentColumnsWithoutDefault = []string{"tid", "component_id", "targcomp_id", "homologue"}
	targetComponentColumnsWithDefault    = []string{}
	targetComponentPrimaryKeyColumns     = []string{"targcomp_id"}
	targetComponentGeneratedColumns      = []string{}
)

type (
	// TargetComponentSlice is an alias for a slice of pointers to TargetComponent.
	// This should almost always be used instead of []TargetComponent.
	TargetComponentSlice []*TargetComponent
	// TargetComponentHook is the signature for custom TargetComponent hook methods
	TargetComponentHook func(context.Context, boil.ContextExecutor, *TargetComponent) error

	targetComponentQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	targetComponentType                 = reflect.TypeOf(&TargetComponent{})
	targetComponentMapping              = queries.MakeStructMapping(targetComponentType)
	targetComponentPrimaryKeyMapping, _ = queries.BindMapping(targetComponentType, targetComponentMapping, targetComponentPrimaryKeyColumns)
	targetComponentInsertCacheMut       sync.RWMutex
	targetComponentInsertCache          = make(map[string]insertCache)
	targetComponentUpdateCacheMut       sync.RWMutex
	targetComponentUpdateCache          = make(map[string]updateCache)
	targetComponentUpsertCacheMut       sync.RWMutex
	targetComponentUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Per-hook-point registries; populated via AddTargetComponentHook.
var targetComponentAfterSelectHooks []TargetComponentHook

var targetComponentBeforeInsertHooks []TargetComponentHook
var targetComponentAfterInsertHooks []TargetComponentHook

var targetComponentBeforeUpdateHooks []TargetComponentHook
var targetComponentAfterUpdateHooks []TargetComponentHook

var targetComponentBeforeDeleteHooks []TargetComponentHook
var targetComponentAfterDeleteHooks []TargetComponentHook

var targetComponentBeforeUpsertHooks []TargetComponentHook
var targetComponentAfterUpsertHooks []TargetComponentHook

// doAfterSelectHooks executes all "after Select" hooks.
+func (o *TargetComponent) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetComponentAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *TargetComponent) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetComponentBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *TargetComponent) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetComponentAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *TargetComponent) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetComponentBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *TargetComponent) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetComponentAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *TargetComponent) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetComponentBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *TargetComponent) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetComponentAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *TargetComponent) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetComponentBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *TargetComponent) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetComponentAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddTargetComponentHook registers your hook function for all future operations. 
+func AddTargetComponentHook(hookPoint boil.HookPoint, targetComponentHook TargetComponentHook) { + switch hookPoint { + case boil.AfterSelectHook: + targetComponentAfterSelectHooks = append(targetComponentAfterSelectHooks, targetComponentHook) + case boil.BeforeInsertHook: + targetComponentBeforeInsertHooks = append(targetComponentBeforeInsertHooks, targetComponentHook) + case boil.AfterInsertHook: + targetComponentAfterInsertHooks = append(targetComponentAfterInsertHooks, targetComponentHook) + case boil.BeforeUpdateHook: + targetComponentBeforeUpdateHooks = append(targetComponentBeforeUpdateHooks, targetComponentHook) + case boil.AfterUpdateHook: + targetComponentAfterUpdateHooks = append(targetComponentAfterUpdateHooks, targetComponentHook) + case boil.BeforeDeleteHook: + targetComponentBeforeDeleteHooks = append(targetComponentBeforeDeleteHooks, targetComponentHook) + case boil.AfterDeleteHook: + targetComponentAfterDeleteHooks = append(targetComponentAfterDeleteHooks, targetComponentHook) + case boil.BeforeUpsertHook: + targetComponentBeforeUpsertHooks = append(targetComponentBeforeUpsertHooks, targetComponentHook) + case boil.AfterUpsertHook: + targetComponentAfterUpsertHooks = append(targetComponentAfterUpsertHooks, targetComponentHook) + } +} + +// One returns a single targetComponent record from the query. +func (q targetComponentQuery) One(ctx context.Context, exec boil.ContextExecutor) (*TargetComponent, error) { + o := &TargetComponent{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for target_components") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all TargetComponent records from the query. 
+func (q targetComponentQuery) All(ctx context.Context, exec boil.ContextExecutor) (TargetComponentSlice, error) { + var o []*TargetComponent + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to TargetComponent slice") + } + + if len(targetComponentAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all TargetComponent records in the query. +func (q targetComponentQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count target_components rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q targetComponentQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if target_components exists") + } + + return count > 0, nil +} + +// TidTargetDictionary pointed to by the foreign key. +func (o *TargetComponent) TidTargetDictionary(mods ...qm.QueryMod) targetDictionaryQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"tid\" = ?", o.Tid), + } + + queryMods = append(queryMods, mods...) + + return TargetDictionaries(queryMods...) +} + +// Component pointed to by the foreign key. +func (o *TargetComponent) Component(mods ...qm.QueryMod) componentSequenceQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"component_id\" = ?", o.ComponentID), + } + + queryMods = append(queryMods, mods...) 
+ + return ComponentSequences(queryMods...) +} + +// LoadTidTargetDictionary allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (targetComponentL) LoadTidTargetDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTargetComponent interface{}, mods queries.Applicator) error { + var slice []*TargetComponent + var object *TargetComponent + + if singular { + object = maybeTargetComponent.(*TargetComponent) + } else { + slice = *maybeTargetComponent.(*[]*TargetComponent) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &targetComponentR{} + } + args = append(args, object.Tid) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &targetComponentR{} + } + + for _, a := range args { + if a == obj.Tid { + continue Outer + } + } + + args = append(args, obj.Tid) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`target_dictionary`), + qm.WhereIn(`target_dictionary.tid in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load TargetDictionary") + } + + var resultSlice []*TargetDictionary + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice TargetDictionary") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for target_dictionary") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for target_dictionary") + } + + if len(targetComponentAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := 
resultSlice[0] + object.R.TidTargetDictionary = foreign + if foreign.R == nil { + foreign.R = &targetDictionaryR{} + } + foreign.R.TidTargetComponents = append(foreign.R.TidTargetComponents, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Tid == foreign.Tid { + local.R.TidTargetDictionary = foreign + if foreign.R == nil { + foreign.R = &targetDictionaryR{} + } + foreign.R.TidTargetComponents = append(foreign.R.TidTargetComponents, local) + break + } + } + } + + return nil +} + +// LoadComponent allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (targetComponentL) LoadComponent(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTargetComponent interface{}, mods queries.Applicator) error { + var slice []*TargetComponent + var object *TargetComponent + + if singular { + object = maybeTargetComponent.(*TargetComponent) + } else { + slice = *maybeTargetComponent.(*[]*TargetComponent) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &targetComponentR{} + } + args = append(args, object.ComponentID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &targetComponentR{} + } + + for _, a := range args { + if a == obj.ComponentID { + continue Outer + } + } + + args = append(args, obj.ComponentID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`component_sequences`), + qm.WhereIn(`component_sequences.component_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ComponentSequence") + } + + var resultSlice []*ComponentSequence + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice ComponentSequence") + } + + if err = 
results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for component_sequences") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for component_sequences") + } + + if len(targetComponentAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Component = foreign + if foreign.R == nil { + foreign.R = &componentSequenceR{} + } + foreign.R.ComponentTargetComponents = append(foreign.R.ComponentTargetComponents, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ComponentID == foreign.ComponentID { + local.R.Component = foreign + if foreign.R == nil { + foreign.R = &componentSequenceR{} + } + foreign.R.ComponentTargetComponents = append(foreign.R.ComponentTargetComponents, local) + break + } + } + } + + return nil +} + +// SetTidTargetDictionary of the targetComponent to the related item. +// Sets o.R.TidTargetDictionary to related. +// Adds o to related.R.TidTargetComponents. 
+func (o *TargetComponent) SetTidTargetDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *TargetDictionary) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"target_components\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"tid"}), + strmangle.WhereClause("\"", "\"", 0, targetComponentPrimaryKeyColumns), + ) + values := []interface{}{related.Tid, o.TargcompID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.Tid = related.Tid + if o.R == nil { + o.R = &targetComponentR{ + TidTargetDictionary: related, + } + } else { + o.R.TidTargetDictionary = related + } + + if related.R == nil { + related.R = &targetDictionaryR{ + TidTargetComponents: TargetComponentSlice{o}, + } + } else { + related.R.TidTargetComponents = append(related.R.TidTargetComponents, o) + } + + return nil +} + +// SetComponent of the targetComponent to the related item. +// Sets o.R.Component to related. +// Adds o to related.R.ComponentTargetComponents. 
+func (o *TargetComponent) SetComponent(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ComponentSequence) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"target_components\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"component_id"}), + strmangle.WhereClause("\"", "\"", 0, targetComponentPrimaryKeyColumns), + ) + values := []interface{}{related.ComponentID, o.TargcompID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.ComponentID = related.ComponentID + if o.R == nil { + o.R = &targetComponentR{ + Component: related, + } + } else { + o.R.Component = related + } + + if related.R == nil { + related.R = &componentSequenceR{ + ComponentTargetComponents: TargetComponentSlice{o}, + } + } else { + related.R.ComponentTargetComponents = append(related.R.ComponentTargetComponents, o) + } + + return nil +} + +// TargetComponents retrieves all the records using an executor. +func TargetComponents(mods ...qm.QueryMod) targetComponentQuery { + mods = append(mods, qm.From("\"target_components\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"target_components\".*"}) + } + + return targetComponentQuery{q} +} + +// FindTargetComponent retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindTargetComponent(ctx context.Context, exec boil.ContextExecutor, targcompID int64, selectCols ...string) (*TargetComponent, error) { + targetComponentObj := &TargetComponent{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"target_components\" where \"targcomp_id\"=?", sel, + ) + + q := queries.Raw(query, targcompID) + + err := q.Bind(ctx, exec, targetComponentObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from target_components") + } + + if err = targetComponentObj.doAfterSelectHooks(ctx, exec); err != nil { + return targetComponentObj, err + } + + return targetComponentObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *TargetComponent) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no target_components provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(targetComponentColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + targetComponentInsertCacheMut.RLock() + cache, cached := targetComponentInsertCache[key] + targetComponentInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + targetComponentAllColumns, + targetComponentColumnsWithDefault, + targetComponentColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(targetComponentType, targetComponentMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(targetComponentType, targetComponentMapping, returnColumns) + if err != nil { + return 
err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"target_components\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"target_components\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into target_components") + } + + if !cached { + targetComponentInsertCacheMut.Lock() + targetComponentInsertCache[key] = cache + targetComponentInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the TargetComponent. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *TargetComponent) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + targetComponentUpdateCacheMut.RLock() + cache, cached := targetComponentUpdateCache[key] + targetComponentUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + targetComponentAllColumns, + targetComponentPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update target_components, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"target_components\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, targetComponentPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(targetComponentType, targetComponentMapping, append(wl, targetComponentPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update target_components row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for target_components") + } + + if !cached { + targetComponentUpdateCacheMut.Lock() + targetComponentUpdateCache[key] = cache + targetComponentUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q targetComponentQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for target_components") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for target_components") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o TargetComponentSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), targetComponentPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"target_components\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, targetComponentPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in targetComponent slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all targetComponent") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *TargetComponent) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no target_components provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(targetComponentColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + targetComponentUpsertCacheMut.RLock() + cache, cached := targetComponentUpsertCache[key] + targetComponentUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + targetComponentAllColumns, + targetComponentColumnsWithDefault, + targetComponentColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + 
targetComponentAllColumns, + targetComponentPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert target_components, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(targetComponentPrimaryKeyColumns)) + copy(conflict, targetComponentPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"target_components\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(targetComponentType, targetComponentMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(targetComponentType, targetComponentMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert target_components") + } + + if !cached { + targetComponentUpsertCacheMut.Lock() + targetComponentUpsertCache[key] = cache + targetComponentUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single TargetComponent record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *TargetComponent) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no TargetComponent provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), targetComponentPrimaryKeyMapping) + sql := "DELETE FROM \"target_components\" WHERE \"targcomp_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from target_components") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for target_components") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q targetComponentQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no targetComponentQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from target_components") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for target_components") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o TargetComponentSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(targetComponentBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), targetComponentPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"target_components\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, targetComponentPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from targetComponent slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for target_components") + } + + if len(targetComponentAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *TargetComponent) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindTargetComponent(ctx, exec, o.TargcompID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *TargetComponentSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := TargetComponentSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), targetComponentPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"target_components\".* FROM \"target_components\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, targetComponentPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in TargetComponentSlice") + } + + *o = slice + + return nil +} + +// TargetComponentExists checks if the TargetComponent row exists. +func TargetComponentExists(ctx context.Context, exec boil.ContextExecutor, targcompID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"target_components\" where \"targcomp_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, targcompID) + } + row := exec.QueryRowContext(ctx, sql, targcompID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if target_components exists") + } + + return exists, nil +} diff --git a/models/target_dictionary.go b/models/target_dictionary.go new file mode 100644 index 0000000..f6e5e01 --- /dev/null +++ b/models/target_dictionary.go @@ -0,0 +1,2828 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// TargetDictionary is an object representing the database table. +type TargetDictionary struct { + Tid int64 `boil:"tid" json:"tid" toml:"tid" yaml:"tid"` + TargetType null.String `boil:"target_type" json:"target_type,omitempty" toml:"target_type" yaml:"target_type,omitempty"` + PrefName string `boil:"pref_name" json:"pref_name" toml:"pref_name" yaml:"pref_name"` + TaxID null.Int64 `boil:"tax_id" json:"tax_id,omitempty" toml:"tax_id" yaml:"tax_id,omitempty"` + Organism null.String `boil:"organism" json:"organism,omitempty" toml:"organism" yaml:"organism,omitempty"` + ChemblID string `boil:"chembl_id" json:"chembl_id" toml:"chembl_id" yaml:"chembl_id"` + SpeciesGroupFlag int16 `boil:"species_group_flag" json:"species_group_flag" toml:"species_group_flag" yaml:"species_group_flag"` + + R *targetDictionaryR `boil:"-" json:"-" toml:"-" yaml:"-"` + L targetDictionaryL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var TargetDictionaryColumns = struct { + Tid string + TargetType string + PrefName string + TaxID string + Organism string + ChemblID string + SpeciesGroupFlag string +}{ + Tid: "tid", + TargetType: "target_type", + PrefName: "pref_name", + TaxID: "tax_id", + Organism: "organism", + ChemblID: "chembl_id", + SpeciesGroupFlag: "species_group_flag", +} + +var TargetDictionaryTableColumns = struct { + Tid string + TargetType string + PrefName string + TaxID string + Organism string + ChemblID string + SpeciesGroupFlag string +}{ + Tid: "target_dictionary.tid", + TargetType: "target_dictionary.target_type", + PrefName: 
"target_dictionary.pref_name", + TaxID: "target_dictionary.tax_id", + Organism: "target_dictionary.organism", + ChemblID: "target_dictionary.chembl_id", + SpeciesGroupFlag: "target_dictionary.species_group_flag", +} + +// Generated where + +var TargetDictionaryWhere = struct { + Tid whereHelperint64 + TargetType whereHelpernull_String + PrefName whereHelperstring + TaxID whereHelpernull_Int64 + Organism whereHelpernull_String + ChemblID whereHelperstring + SpeciesGroupFlag whereHelperint16 +}{ + Tid: whereHelperint64{field: "\"target_dictionary\".\"tid\""}, + TargetType: whereHelpernull_String{field: "\"target_dictionary\".\"target_type\""}, + PrefName: whereHelperstring{field: "\"target_dictionary\".\"pref_name\""}, + TaxID: whereHelpernull_Int64{field: "\"target_dictionary\".\"tax_id\""}, + Organism: whereHelpernull_String{field: "\"target_dictionary\".\"organism\""}, + ChemblID: whereHelperstring{field: "\"target_dictionary\".\"chembl_id\""}, + SpeciesGroupFlag: whereHelperint16{field: "\"target_dictionary\".\"species_group_flag\""}, +} + +// TargetDictionaryRels is where relationship names are stored. +var TargetDictionaryRels = struct { + TargetDictionaryTargetType string + Chembl string + TidAssays string + TidBindingSites string + TidDrugMechanisms string + EnzymeTidMetabolisms string + TidTargetComponents string + TidTargetRelations string + RelatedTidTargetRelations string +}{ + TargetDictionaryTargetType: "TargetDictionaryTargetType", + Chembl: "Chembl", + TidAssays: "TidAssays", + TidBindingSites: "TidBindingSites", + TidDrugMechanisms: "TidDrugMechanisms", + EnzymeTidMetabolisms: "EnzymeTidMetabolisms", + TidTargetComponents: "TidTargetComponents", + TidTargetRelations: "TidTargetRelations", + RelatedTidTargetRelations: "RelatedTidTargetRelations", +} + +// targetDictionaryR is where relationships are stored. 
+type targetDictionaryR struct { + TargetDictionaryTargetType *TargetType `boil:"TargetDictionaryTargetType" json:"TargetDictionaryTargetType" toml:"TargetDictionaryTargetType" yaml:"TargetDictionaryTargetType"` + Chembl *ChemblIDLookup `boil:"Chembl" json:"Chembl" toml:"Chembl" yaml:"Chembl"` + TidAssays AssaySlice `boil:"TidAssays" json:"TidAssays" toml:"TidAssays" yaml:"TidAssays"` + TidBindingSites BindingSiteSlice `boil:"TidBindingSites" json:"TidBindingSites" toml:"TidBindingSites" yaml:"TidBindingSites"` + TidDrugMechanisms DrugMechanismSlice `boil:"TidDrugMechanisms" json:"TidDrugMechanisms" toml:"TidDrugMechanisms" yaml:"TidDrugMechanisms"` + EnzymeTidMetabolisms MetabolismSlice `boil:"EnzymeTidMetabolisms" json:"EnzymeTidMetabolisms" toml:"EnzymeTidMetabolisms" yaml:"EnzymeTidMetabolisms"` + TidTargetComponents TargetComponentSlice `boil:"TidTargetComponents" json:"TidTargetComponents" toml:"TidTargetComponents" yaml:"TidTargetComponents"` + TidTargetRelations TargetRelationSlice `boil:"TidTargetRelations" json:"TidTargetRelations" toml:"TidTargetRelations" yaml:"TidTargetRelations"` + RelatedTidTargetRelations TargetRelationSlice `boil:"RelatedTidTargetRelations" json:"RelatedTidTargetRelations" toml:"RelatedTidTargetRelations" yaml:"RelatedTidTargetRelations"` +} + +// NewStruct creates a new relationship struct +func (*targetDictionaryR) NewStruct() *targetDictionaryR { + return &targetDictionaryR{} +} + +func (r *targetDictionaryR) GetTargetDictionaryTargetType() *TargetType { + if r == nil { + return nil + } + return r.TargetDictionaryTargetType +} + +func (r *targetDictionaryR) GetChembl() *ChemblIDLookup { + if r == nil { + return nil + } + return r.Chembl +} + +func (r *targetDictionaryR) GetTidAssays() AssaySlice { + if r == nil { + return nil + } + return r.TidAssays +} + +func (r *targetDictionaryR) GetTidBindingSites() BindingSiteSlice { + if r == nil { + return nil + } + return r.TidBindingSites +} + +func (r *targetDictionaryR) 
GetTidDrugMechanisms() DrugMechanismSlice { + if r == nil { + return nil + } + return r.TidDrugMechanisms +} + +func (r *targetDictionaryR) GetEnzymeTidMetabolisms() MetabolismSlice { + if r == nil { + return nil + } + return r.EnzymeTidMetabolisms +} + +func (r *targetDictionaryR) GetTidTargetComponents() TargetComponentSlice { + if r == nil { + return nil + } + return r.TidTargetComponents +} + +func (r *targetDictionaryR) GetTidTargetRelations() TargetRelationSlice { + if r == nil { + return nil + } + return r.TidTargetRelations +} + +func (r *targetDictionaryR) GetRelatedTidTargetRelations() TargetRelationSlice { + if r == nil { + return nil + } + return r.RelatedTidTargetRelations +} + +// targetDictionaryL is where Load methods for each relationship are stored. +type targetDictionaryL struct{} + +var ( + targetDictionaryAllColumns = []string{"tid", "target_type", "pref_name", "tax_id", "organism", "chembl_id", "species_group_flag"} + targetDictionaryColumnsWithoutDefault = []string{"tid", "pref_name", "chembl_id", "species_group_flag"} + targetDictionaryColumnsWithDefault = []string{"target_type", "tax_id", "organism"} + targetDictionaryPrimaryKeyColumns = []string{"tid"} + targetDictionaryGeneratedColumns = []string{} +) + +type ( + // TargetDictionarySlice is an alias for a slice of pointers to TargetDictionary. + // This should almost always be used instead of []TargetDictionary. 
+ TargetDictionarySlice []*TargetDictionary + // TargetDictionaryHook is the signature for custom TargetDictionary hook methods + TargetDictionaryHook func(context.Context, boil.ContextExecutor, *TargetDictionary) error + + targetDictionaryQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + targetDictionaryType = reflect.TypeOf(&TargetDictionary{}) + targetDictionaryMapping = queries.MakeStructMapping(targetDictionaryType) + targetDictionaryPrimaryKeyMapping, _ = queries.BindMapping(targetDictionaryType, targetDictionaryMapping, targetDictionaryPrimaryKeyColumns) + targetDictionaryInsertCacheMut sync.RWMutex + targetDictionaryInsertCache = make(map[string]insertCache) + targetDictionaryUpdateCacheMut sync.RWMutex + targetDictionaryUpdateCache = make(map[string]updateCache) + targetDictionaryUpsertCacheMut sync.RWMutex + targetDictionaryUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var targetDictionaryAfterSelectHooks []TargetDictionaryHook + +var targetDictionaryBeforeInsertHooks []TargetDictionaryHook +var targetDictionaryAfterInsertHooks []TargetDictionaryHook + +var targetDictionaryBeforeUpdateHooks []TargetDictionaryHook +var targetDictionaryAfterUpdateHooks []TargetDictionaryHook + +var targetDictionaryBeforeDeleteHooks []TargetDictionaryHook +var targetDictionaryAfterDeleteHooks []TargetDictionaryHook + +var targetDictionaryBeforeUpsertHooks []TargetDictionaryHook +var targetDictionaryAfterUpsertHooks []TargetDictionaryHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *TargetDictionary) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetDictionaryAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *TargetDictionary) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetDictionaryBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *TargetDictionary) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetDictionaryAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *TargetDictionary) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetDictionaryBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *TargetDictionary) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetDictionaryAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *TargetDictionary) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetDictionaryBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *TargetDictionary) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetDictionaryAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *TargetDictionary) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetDictionaryBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *TargetDictionary) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range targetDictionaryAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddTargetDictionaryHook registers your hook function for all future operations. 
+func AddTargetDictionaryHook(hookPoint boil.HookPoint, targetDictionaryHook TargetDictionaryHook) { + switch hookPoint { + case boil.AfterSelectHook: + targetDictionaryAfterSelectHooks = append(targetDictionaryAfterSelectHooks, targetDictionaryHook) + case boil.BeforeInsertHook: + targetDictionaryBeforeInsertHooks = append(targetDictionaryBeforeInsertHooks, targetDictionaryHook) + case boil.AfterInsertHook: + targetDictionaryAfterInsertHooks = append(targetDictionaryAfterInsertHooks, targetDictionaryHook) + case boil.BeforeUpdateHook: + targetDictionaryBeforeUpdateHooks = append(targetDictionaryBeforeUpdateHooks, targetDictionaryHook) + case boil.AfterUpdateHook: + targetDictionaryAfterUpdateHooks = append(targetDictionaryAfterUpdateHooks, targetDictionaryHook) + case boil.BeforeDeleteHook: + targetDictionaryBeforeDeleteHooks = append(targetDictionaryBeforeDeleteHooks, targetDictionaryHook) + case boil.AfterDeleteHook: + targetDictionaryAfterDeleteHooks = append(targetDictionaryAfterDeleteHooks, targetDictionaryHook) + case boil.BeforeUpsertHook: + targetDictionaryBeforeUpsertHooks = append(targetDictionaryBeforeUpsertHooks, targetDictionaryHook) + case boil.AfterUpsertHook: + targetDictionaryAfterUpsertHooks = append(targetDictionaryAfterUpsertHooks, targetDictionaryHook) + } +} + +// One returns a single targetDictionary record from the query. +func (q targetDictionaryQuery) One(ctx context.Context, exec boil.ContextExecutor) (*TargetDictionary, error) { + o := &TargetDictionary{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for target_dictionary") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all TargetDictionary records from the query. 
+func (q targetDictionaryQuery) All(ctx context.Context, exec boil.ContextExecutor) (TargetDictionarySlice, error) { + var o []*TargetDictionary + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to TargetDictionary slice") + } + + if len(targetDictionaryAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all TargetDictionary records in the query. +func (q targetDictionaryQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count target_dictionary rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q targetDictionaryQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if target_dictionary exists") + } + + return count > 0, nil +} + +// TargetDictionaryTargetType pointed to by the foreign key. +func (o *TargetDictionary) TargetDictionaryTargetType(mods ...qm.QueryMod) targetTypeQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"target_type\" = ?", o.TargetType), + } + + queryMods = append(queryMods, mods...) + + return TargetTypes(queryMods...) +} + +// Chembl pointed to by the foreign key. +func (o *TargetDictionary) Chembl(mods ...qm.QueryMod) chemblIDLookupQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"chembl_id\" = ?", o.ChemblID), + } + + queryMods = append(queryMods, mods...) 
+ + return ChemblIDLookups(queryMods...) +} + +// TidAssays retrieves all the assay's Assays with an executor via tid column. +func (o *TargetDictionary) TidAssays(mods ...qm.QueryMod) assayQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"assays\".\"tid\"=?", o.Tid), + ) + + return Assays(queryMods...) +} + +// TidBindingSites retrieves all the binding_site's BindingSites with an executor via tid column. +func (o *TargetDictionary) TidBindingSites(mods ...qm.QueryMod) bindingSiteQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"binding_sites\".\"tid\"=?", o.Tid), + ) + + return BindingSites(queryMods...) +} + +// TidDrugMechanisms retrieves all the drug_mechanism's DrugMechanisms with an executor via tid column. +func (o *TargetDictionary) TidDrugMechanisms(mods ...qm.QueryMod) drugMechanismQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"drug_mechanism\".\"tid\"=?", o.Tid), + ) + + return DrugMechanisms(queryMods...) +} + +// EnzymeTidMetabolisms retrieves all the metabolism's Metabolisms with an executor via enzyme_tid column. +func (o *TargetDictionary) EnzymeTidMetabolisms(mods ...qm.QueryMod) metabolismQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"metabolism\".\"enzyme_tid\"=?", o.Tid), + ) + + return Metabolisms(queryMods...) +} + +// TidTargetComponents retrieves all the target_component's TargetComponents with an executor via tid column. +func (o *TargetDictionary) TidTargetComponents(mods ...qm.QueryMod) targetComponentQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) 
+ } + + queryMods = append(queryMods, + qm.Where("\"target_components\".\"tid\"=?", o.Tid), + ) + + return TargetComponents(queryMods...) +} + +// TidTargetRelations retrieves all the target_relation's TargetRelations with an executor via tid column. +func (o *TargetDictionary) TidTargetRelations(mods ...qm.QueryMod) targetRelationQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"target_relations\".\"tid\"=?", o.Tid), + ) + + return TargetRelations(queryMods...) +} + +// RelatedTidTargetRelations retrieves all the target_relation's TargetRelations with an executor via related_tid column. +func (o *TargetDictionary) RelatedTidTargetRelations(mods ...qm.QueryMod) targetRelationQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"target_relations\".\"related_tid\"=?", o.Tid), + ) + + return TargetRelations(queryMods...) +} + +// LoadTargetDictionaryTargetType allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (targetDictionaryL) LoadTargetDictionaryTargetType(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTargetDictionary interface{}, mods queries.Applicator) error { + var slice []*TargetDictionary + var object *TargetDictionary + + if singular { + object = maybeTargetDictionary.(*TargetDictionary) + } else { + slice = *maybeTargetDictionary.(*[]*TargetDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &targetDictionaryR{} + } + if !queries.IsNil(object.TargetType) { + args = append(args, object.TargetType) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &targetDictionaryR{} + } + + for _, a := range args { + if queries.Equal(a, obj.TargetType) { + continue Outer + } + } + + if !queries.IsNil(obj.TargetType) { + args = append(args, obj.TargetType) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`target_type`), + qm.WhereIn(`target_type.target_type in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load TargetType") + } + + var resultSlice []*TargetType + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice TargetType") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for target_type") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for target_type") + } + + if len(targetDictionaryAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.TargetDictionaryTargetType = foreign + if foreign.R == nil { + 
foreign.R = &targetTypeR{} + } + foreign.R.TargetDictionaries = append(foreign.R.TargetDictionaries, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.TargetType, foreign.TargetType) { + local.R.TargetDictionaryTargetType = foreign + if foreign.R == nil { + foreign.R = &targetTypeR{} + } + foreign.R.TargetDictionaries = append(foreign.R.TargetDictionaries, local) + break + } + } + } + + return nil +} + +// LoadChembl allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (targetDictionaryL) LoadChembl(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTargetDictionary interface{}, mods queries.Applicator) error { + var slice []*TargetDictionary + var object *TargetDictionary + + if singular { + object = maybeTargetDictionary.(*TargetDictionary) + } else { + slice = *maybeTargetDictionary.(*[]*TargetDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &targetDictionaryR{} + } + args = append(args, object.ChemblID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &targetDictionaryR{} + } + + for _, a := range args { + if a == obj.ChemblID { + continue Outer + } + } + + args = append(args, obj.ChemblID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`chembl_id_lookup`), + qm.WhereIn(`chembl_id_lookup.chembl_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ChemblIDLookup") + } + + var resultSlice []*ChemblIDLookup + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice ChemblIDLookup") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for 
chembl_id_lookup") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for chembl_id_lookup") + } + + if len(targetDictionaryAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Chembl = foreign + if foreign.R == nil { + foreign.R = &chemblIDLookupR{} + } + foreign.R.ChemblTargetDictionary = object + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ChemblID == foreign.ChemblID { + local.R.Chembl = foreign + if foreign.R == nil { + foreign.R = &chemblIDLookupR{} + } + foreign.R.ChemblTargetDictionary = local + break + } + } + } + + return nil +} + +// LoadTidAssays allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (targetDictionaryL) LoadTidAssays(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTargetDictionary interface{}, mods queries.Applicator) error { + var slice []*TargetDictionary + var object *TargetDictionary + + if singular { + object = maybeTargetDictionary.(*TargetDictionary) + } else { + slice = *maybeTargetDictionary.(*[]*TargetDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &targetDictionaryR{} + } + args = append(args, object.Tid) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &targetDictionaryR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Tid) { + continue Outer + } + } + + args = append(args, obj.Tid) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assays`), + qm.WhereIn(`assays.tid in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load assays") + } + + var resultSlice []*Assay + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice assays") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on assays") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays") + } + + if len(assayAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.TidAssays = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.TidTargetDictionary = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.Tid, foreign.Tid) { + local.R.TidAssays = 
append(local.R.TidAssays, foreign) + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.TidTargetDictionary = local + break + } + } + } + + return nil +} + +// LoadTidBindingSites allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (targetDictionaryL) LoadTidBindingSites(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTargetDictionary interface{}, mods queries.Applicator) error { + var slice []*TargetDictionary + var object *TargetDictionary + + if singular { + object = maybeTargetDictionary.(*TargetDictionary) + } else { + slice = *maybeTargetDictionary.(*[]*TargetDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &targetDictionaryR{} + } + args = append(args, object.Tid) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &targetDictionaryR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Tid) { + continue Outer + } + } + + args = append(args, obj.Tid) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`binding_sites`), + qm.WhereIn(`binding_sites.tid in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load binding_sites") + } + + var resultSlice []*BindingSite + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice binding_sites") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on binding_sites") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for binding_sites") + } + + if len(bindingSiteAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + 
return err + } + } + } + if singular { + object.R.TidBindingSites = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &bindingSiteR{} + } + foreign.R.TidTargetDictionary = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.Tid, foreign.Tid) { + local.R.TidBindingSites = append(local.R.TidBindingSites, foreign) + if foreign.R == nil { + foreign.R = &bindingSiteR{} + } + foreign.R.TidTargetDictionary = local + break + } + } + } + + return nil +} + +// LoadTidDrugMechanisms allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (targetDictionaryL) LoadTidDrugMechanisms(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTargetDictionary interface{}, mods queries.Applicator) error { + var slice []*TargetDictionary + var object *TargetDictionary + + if singular { + object = maybeTargetDictionary.(*TargetDictionary) + } else { + slice = *maybeTargetDictionary.(*[]*TargetDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &targetDictionaryR{} + } + args = append(args, object.Tid) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &targetDictionaryR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Tid) { + continue Outer + } + } + + args = append(args, obj.Tid) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`drug_mechanism`), + qm.WhereIn(`drug_mechanism.tid in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load drug_mechanism") + } + + var resultSlice []*DrugMechanism + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice drug_mechanism") + } + + if err = 
results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on drug_mechanism") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for drug_mechanism") + } + + if len(drugMechanismAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.TidDrugMechanisms = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &drugMechanismR{} + } + foreign.R.TidTargetDictionary = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.Tid, foreign.Tid) { + local.R.TidDrugMechanisms = append(local.R.TidDrugMechanisms, foreign) + if foreign.R == nil { + foreign.R = &drugMechanismR{} + } + foreign.R.TidTargetDictionary = local + break + } + } + } + + return nil +} + +// LoadEnzymeTidMetabolisms allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
// NOTE(review): this file appears to be SQLBoiler-generated ORM code; prefer
// regenerating from the schema over hand-editing.
//
// LoadEnzymeTidMetabolisms eagerly loads the Metabolism rows whose enzyme_tid
// references the given TargetDictionary (singular) or slice of them, and caches
// the results into each object's R struct. 1-M relationship.
func (targetDictionaryL) LoadEnzymeTidMetabolisms(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTargetDictionary interface{}, mods queries.Applicator) error {
	var slice []*TargetDictionary
	var object *TargetDictionary

	if singular {
		object = maybeTargetDictionary.(*TargetDictionary)
	} else {
		slice = *maybeTargetDictionary.(*[]*TargetDictionary)
	}

	// Collect the set of parent keys, deduplicated so the IN clause holds
	// each tid only once.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &targetDictionaryR{}
		}
		args = append(args, object.Tid)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &targetDictionaryR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.Tid) {
					continue Outer
				}
			}

			args = append(args, obj.Tid)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`metabolism`),
		qm.WhereIn(`metabolism.enzyme_tid in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load metabolism")
	}

	var resultSlice []*Metabolism
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice metabolism")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on metabolism")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for metabolism")
	}

	// Fire after-select hooks only when any are registered.
	if len(metabolismAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.EnzymeTidMetabolisms = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &metabolismR{}
			}
			foreign.R.EnzymeTidTargetDictionary = object
		}
		return nil
	}

	// Attach each loaded child to its parent and set the back-reference.
	// queries.Equal is used because enzyme_tid is presumably nullable —
	// TODO confirm against the schema.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.Tid, foreign.EnzymeTid) {
				local.R.EnzymeTidMetabolisms = append(local.R.EnzymeTidMetabolisms, foreign)
				if foreign.R == nil {
					foreign.R = &metabolismR{}
				}
				foreign.R.EnzymeTidTargetDictionary = local
				break
			}
		}
	}

	return nil
}

// LoadTidTargetComponents allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (targetDictionaryL) LoadTidTargetComponents(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTargetDictionary interface{}, mods queries.Applicator) error {
	var slice []*TargetDictionary
	var object *TargetDictionary

	if singular {
		object = maybeTargetDictionary.(*TargetDictionary)
	} else {
		slice = *maybeTargetDictionary.(*[]*TargetDictionary)
	}

	// Deduplicated parent keys for the IN clause.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &targetDictionaryR{}
		}
		args = append(args, object.Tid)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &targetDictionaryR{}
			}

			for _, a := range args {
				if a == obj.Tid {
					continue Outer
				}
			}

			args = append(args, obj.Tid)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`target_components`),
		qm.WhereIn(`target_components.tid in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load target_components")
	}

	var resultSlice []*TargetComponent
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice target_components")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on target_components")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for target_components")
	}

	if len(targetComponentAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.TidTargetComponents = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &targetComponentR{}
			}
			foreign.R.TidTargetDictionary = object
		}
		return nil
	}

	// Attach children to parents; direct == comparison here (non-nullable key).
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.Tid == foreign.Tid {
				local.R.TidTargetComponents = append(local.R.TidTargetComponents, foreign)
				if foreign.R == nil {
					foreign.R = &targetComponentR{}
				}
				foreign.R.TidTargetDictionary = local
				break
			}
		}
	}

	return nil
}

// LoadTidTargetRelations allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (targetDictionaryL) LoadTidTargetRelations(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTargetDictionary interface{}, mods queries.Applicator) error {
	var slice []*TargetDictionary
	var object *TargetDictionary

	if singular {
		object = maybeTargetDictionary.(*TargetDictionary)
	} else {
		slice = *maybeTargetDictionary.(*[]*TargetDictionary)
	}

	// Deduplicated parent keys for the IN clause.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &targetDictionaryR{}
		}
		args = append(args, object.Tid)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &targetDictionaryR{}
			}

			for _, a := range args {
				if a == obj.Tid {
					continue Outer
				}
			}

			args = append(args, obj.Tid)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`target_relations`),
		qm.WhereIn(`target_relations.tid in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load target_relations")
	}

	var resultSlice []*TargetRelation
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice target_relations")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on target_relations")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for target_relations")
	}

	if len(targetRelationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.TidTargetRelations = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &targetRelationR{}
			}
			foreign.R.TidTargetDictionary = object
		}
		return nil
	}

	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.Tid == foreign.Tid {
				local.R.TidTargetRelations = append(local.R.TidTargetRelations, foreign)
				if foreign.R == nil {
					foreign.R = &targetRelationR{}
				}
				foreign.R.TidTargetDictionary = local
				break
			}
		}
	}

	return nil
}

// LoadRelatedTidTargetRelations allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
// LoadRelatedTidTargetRelations eagerly loads TargetRelation rows whose
// related_tid references the given TargetDictionary (or slice), caching the
// results into each object's R struct.
func (targetDictionaryL) LoadRelatedTidTargetRelations(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTargetDictionary interface{}, mods queries.Applicator) error {
	var slice []*TargetDictionary
	var object *TargetDictionary

	if singular {
		object = maybeTargetDictionary.(*TargetDictionary)
	} else {
		slice = *maybeTargetDictionary.(*[]*TargetDictionary)
	}

	// Deduplicated parent keys for the IN clause.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &targetDictionaryR{}
		}
		args = append(args, object.Tid)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &targetDictionaryR{}
			}

			for _, a := range args {
				if a == obj.Tid {
					continue Outer
				}
			}

			args = append(args, obj.Tid)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`target_relations`),
		qm.WhereIn(`target_relations.related_tid in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load target_relations")
	}

	var resultSlice []*TargetRelation
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice target_relations")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on target_relations")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for target_relations")
	}

	if len(targetRelationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.RelatedTidTargetRelations = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &targetRelationR{}
			}
			foreign.R.RelatedTidTargetDictionary = object
		}
		return nil
	}

	// Attach each child to the parent matching its related_tid.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if local.Tid == foreign.RelatedTid {
				local.R.RelatedTidTargetRelations = append(local.R.RelatedTidTargetRelations, foreign)
				if foreign.R == nil {
					foreign.R = &targetRelationR{}
				}
				foreign.R.RelatedTidTargetDictionary = local
				break
			}
		}
	}

	return nil
}

// SetTargetDictionaryTargetType of the targetDictionary to the related item.
// Sets o.R.TargetDictionaryTargetType to related.
// Adds o to related.R.TargetDictionaries.
func (o *TargetDictionary) SetTargetDictionaryTargetType(ctx context.Context, exec boil.ContextExecutor, insert bool, related *TargetType) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Persist the FK change first, then mirror it in memory below.
	updateQuery := fmt.Sprintf(
		"UPDATE \"target_dictionary\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"target_type"}),
		strmangle.WhereClause("\"", "\"", 0, targetDictionaryPrimaryKeyColumns),
	)
	values := []interface{}{related.TargetType, o.Tid}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	queries.Assign(&o.TargetType, related.TargetType)
	if o.R == nil {
		o.R = &targetDictionaryR{
			TargetDictionaryTargetType: related,
		}
	} else {
		o.R.TargetDictionaryTargetType = related
	}

	if related.R == nil {
		related.R = &targetTypeR{
			TargetDictionaries: TargetDictionarySlice{o},
		}
	} else {
		related.R.TargetDictionaries = append(related.R.TargetDictionaries, o)
	}

	return nil
}

// RemoveTargetDictionaryTargetType relationship.
// Sets o.R.TargetDictionaryTargetType to nil.
// Removes o from all passed in related items' relationships struct.
// RemoveTargetDictionaryTargetType nulls the target_type FK on o in the DB,
// clears the in-memory forward reference, and drops one matching entry from
// related.R.TargetDictionaries (swap-delete; order is not preserved).
func (o *TargetDictionary) RemoveTargetDictionaryTargetType(ctx context.Context, exec boil.ContextExecutor, related *TargetType) error {
	var err error

	queries.SetScanner(&o.TargetType, nil)
	if _, err = o.Update(ctx, exec, boil.Whitelist("target_type")); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	if o.R != nil {
		o.R.TargetDictionaryTargetType = nil
	}
	if related == nil || related.R == nil {
		return nil
	}

	// NOTE(review): generated removal matches by FK value rather than pointer
	// identity (o.TargetType was just nulled above) — verify against the
	// sqlboiler template before changing anything here.
	for i, ri := range related.R.TargetDictionaries {
		if queries.Equal(o.TargetType, ri.TargetType) {
			continue
		}

		ln := len(related.R.TargetDictionaries)
		if ln > 1 && i < ln-1 {
			related.R.TargetDictionaries[i] = related.R.TargetDictionaries[ln-1]
		}
		related.R.TargetDictionaries = related.R.TargetDictionaries[:ln-1]
		break
	}
	return nil
}

// SetChembl of the targetDictionary to the related item.
// Sets o.R.Chembl to related.
// Adds o to related.R.ChemblTargetDictionary.
func (o *TargetDictionary) SetChembl(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ChemblIDLookup) error {
	var err error
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Persist the FK change first, then mirror it in memory below.
	updateQuery := fmt.Sprintf(
		"UPDATE \"target_dictionary\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"chembl_id"}),
		strmangle.WhereClause("\"", "\"", 0, targetDictionaryPrimaryKeyColumns),
	)
	values := []interface{}{related.ChemblID, o.Tid}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	o.ChemblID = related.ChemblID
	if o.R == nil {
		o.R = &targetDictionaryR{
			Chembl: related,
		}
	} else {
		o.R.Chembl = related
	}

	if related.R == nil {
		related.R = &chemblIDLookupR{
			ChemblTargetDictionary: o,
		}
	} else {
		related.R.ChemblTargetDictionary = o
	}

	return nil
}

// AddTidAssays adds the given related objects to the existing relationships
// of the target_dictionary, optionally inserting them as new records.
// Appends related to o.R.TidAssays.
// Sets related.R.TidTargetDictionary appropriately.
func (o *TargetDictionary) AddTidAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error {
	var err error
	for _, rel := range related {
		if insert {
			// New child: point its FK at o before inserting.
			queries.Assign(&rel.Tid, o.Tid)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing child: re-point its FK in the DB, then in memory.
			updateQuery := fmt.Sprintf(
				"UPDATE \"assays\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"tid"}),
				strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns),
			)
			values := []interface{}{o.Tid, rel.AssayID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.Tid, o.Tid)
		}
	}

	if o.R == nil {
		o.R = &targetDictionaryR{
			TidAssays: related,
		}
	} else {
		o.R.TidAssays = append(o.R.TidAssays, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &assayR{
				TidTargetDictionary: o,
			}
		} else {
			rel.R.TidTargetDictionary = o
		}
	}
	return nil
}

// SetTidAssays removes all previously related items of the
// target_dictionary replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.TidTargetDictionary's TidAssays accordingly.
// Replaces o.R.TidAssays with related.
// Sets related.R.TidTargetDictionary's TidAssays accordingly.
// SetTidAssays detaches every current child by nulling its tid in one
// statement, clears the cached relationships, then delegates to AddTidAssays.
func (o *TargetDictionary) SetTidAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error {
	query := "update \"assays\" set \"tid\" = null where \"tid\" = ?"
	values := []interface{}{o.Tid}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	if o.R != nil {
		// Mirror the DB detach on the in-memory objects.
		for _, rel := range o.R.TidAssays {
			queries.SetScanner(&rel.Tid, nil)
			if rel.R == nil {
				continue
			}

			rel.R.TidTargetDictionary = nil
		}
		o.R.TidAssays = nil
	}

	return o.AddTidAssays(ctx, exec, insert, related...)
}

// RemoveTidAssays relationships from objects passed in.
// Removes related items from R.TidAssays (uses pointer comparison, removal does not keep order)
// Sets related.R.TidTargetDictionary.
func (o *TargetDictionary) RemoveTidAssays(ctx context.Context, exec boil.ContextExecutor, related ...*Assay) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		// Null the FK in memory, then persist that single column.
		queries.SetScanner(&rel.Tid, nil)
		if rel.R != nil {
			rel.R.TidTargetDictionary = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("tid")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	for _, rel := range related {
		for i, ri := range o.R.TidAssays {
			if rel != ri {
				continue
			}

			// Swap-delete: move the last element into slot i, then truncate.
			ln := len(o.R.TidAssays)
			if ln > 1 && i < ln-1 {
				o.R.TidAssays[i] = o.R.TidAssays[ln-1]
			}
			o.R.TidAssays = o.R.TidAssays[:ln-1]
			break
		}
	}

	return nil
}

// AddTidBindingSites adds the given related objects to the existing relationships
// of the target_dictionary, optionally inserting them as new records.
// Appends related to o.R.TidBindingSites.
// Sets related.R.TidTargetDictionary appropriately.
// AddTidBindingSites attaches the given BindingSite rows to o, inserting them
// first when insert is true, and keeps both sides' R structs in sync.
func (o *TargetDictionary) AddTidBindingSites(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*BindingSite) error {
	var err error
	for _, rel := range related {
		if insert {
			// New child: point its FK at o before inserting.
			queries.Assign(&rel.Tid, o.Tid)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing child: re-point its FK in the DB, then in memory.
			updateQuery := fmt.Sprintf(
				"UPDATE \"binding_sites\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"tid"}),
				strmangle.WhereClause("\"", "\"", 0, bindingSitePrimaryKeyColumns),
			)
			values := []interface{}{o.Tid, rel.SiteID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.Tid, o.Tid)
		}
	}

	if o.R == nil {
		o.R = &targetDictionaryR{
			TidBindingSites: related,
		}
	} else {
		o.R.TidBindingSites = append(o.R.TidBindingSites, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &bindingSiteR{
				TidTargetDictionary: o,
			}
		} else {
			rel.R.TidTargetDictionary = o
		}
	}
	return nil
}

// SetTidBindingSites removes all previously related items of the
// target_dictionary replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.TidTargetDictionary's TidBindingSites accordingly.
// Replaces o.R.TidBindingSites with related.
// Sets related.R.TidTargetDictionary's TidBindingSites accordingly.
func (o *TargetDictionary) SetTidBindingSites(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*BindingSite) error {
	query := "update \"binding_sites\" set \"tid\" = null where \"tid\" = ?"
	values := []interface{}{o.Tid}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	if o.R != nil {
		// Mirror the DB detach on the in-memory objects.
		for _, rel := range o.R.TidBindingSites {
			queries.SetScanner(&rel.Tid, nil)
			if rel.R == nil {
				continue
			}

			rel.R.TidTargetDictionary = nil
		}
		o.R.TidBindingSites = nil
	}

	return o.AddTidBindingSites(ctx, exec, insert, related...)
}

// RemoveTidBindingSites relationships from objects passed in.
// Removes related items from R.TidBindingSites (uses pointer comparison, removal does not keep order)
// Sets related.R.TidTargetDictionary.
func (o *TargetDictionary) RemoveTidBindingSites(ctx context.Context, exec boil.ContextExecutor, related ...*BindingSite) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		queries.SetScanner(&rel.Tid, nil)
		if rel.R != nil {
			rel.R.TidTargetDictionary = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("tid")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	for _, rel := range related {
		for i, ri := range o.R.TidBindingSites {
			if rel != ri {
				continue
			}

			// Swap-delete: move the last element into slot i, then truncate.
			ln := len(o.R.TidBindingSites)
			if ln > 1 && i < ln-1 {
				o.R.TidBindingSites[i] = o.R.TidBindingSites[ln-1]
			}
			o.R.TidBindingSites = o.R.TidBindingSites[:ln-1]
			break
		}
	}

	return nil
}

// AddTidDrugMechanisms adds the given related objects to the existing relationships
// of the target_dictionary, optionally inserting them as new records.
// Appends related to o.R.TidDrugMechanisms.
// Sets related.R.TidTargetDictionary appropriately.
// AddTidDrugMechanisms attaches the given DrugMechanism rows to o, inserting
// them first when insert is true, and keeps both sides' R structs in sync.
func (o *TargetDictionary) AddTidDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugMechanism) error {
	var err error
	for _, rel := range related {
		if insert {
			// New child: point its FK at o before inserting.
			queries.Assign(&rel.Tid, o.Tid)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing child: re-point its FK in the DB, then in memory.
			updateQuery := fmt.Sprintf(
				"UPDATE \"drug_mechanism\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"tid"}),
				strmangle.WhereClause("\"", "\"", 0, drugMechanismPrimaryKeyColumns),
			)
			values := []interface{}{o.Tid, rel.MecID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.Tid, o.Tid)
		}
	}

	if o.R == nil {
		o.R = &targetDictionaryR{
			TidDrugMechanisms: related,
		}
	} else {
		o.R.TidDrugMechanisms = append(o.R.TidDrugMechanisms, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &drugMechanismR{
				TidTargetDictionary: o,
			}
		} else {
			rel.R.TidTargetDictionary = o
		}
	}
	return nil
}

// SetTidDrugMechanisms removes all previously related items of the
// target_dictionary replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.TidTargetDictionary's TidDrugMechanisms accordingly.
// Replaces o.R.TidDrugMechanisms with related.
// Sets related.R.TidTargetDictionary's TidDrugMechanisms accordingly.
func (o *TargetDictionary) SetTidDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugMechanism) error {
	query := "update \"drug_mechanism\" set \"tid\" = null where \"tid\" = ?"
	values := []interface{}{o.Tid}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	if o.R != nil {
		// Mirror the DB detach on the in-memory objects.
		for _, rel := range o.R.TidDrugMechanisms {
			queries.SetScanner(&rel.Tid, nil)
			if rel.R == nil {
				continue
			}

			rel.R.TidTargetDictionary = nil
		}
		o.R.TidDrugMechanisms = nil
	}

	return o.AddTidDrugMechanisms(ctx, exec, insert, related...)
}

// RemoveTidDrugMechanisms relationships from objects passed in.
// Removes related items from R.TidDrugMechanisms (uses pointer comparison, removal does not keep order)
// Sets related.R.TidTargetDictionary.
func (o *TargetDictionary) RemoveTidDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, related ...*DrugMechanism) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		queries.SetScanner(&rel.Tid, nil)
		if rel.R != nil {
			rel.R.TidTargetDictionary = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("tid")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	for _, rel := range related {
		for i, ri := range o.R.TidDrugMechanisms {
			if rel != ri {
				continue
			}

			// Swap-delete: move the last element into slot i, then truncate.
			ln := len(o.R.TidDrugMechanisms)
			if ln > 1 && i < ln-1 {
				o.R.TidDrugMechanisms[i] = o.R.TidDrugMechanisms[ln-1]
			}
			o.R.TidDrugMechanisms = o.R.TidDrugMechanisms[:ln-1]
			break
		}
	}

	return nil
}

// AddEnzymeTidMetabolisms adds the given related objects to the existing relationships
// of the target_dictionary, optionally inserting them as new records.
// Appends related to o.R.EnzymeTidMetabolisms.
// Sets related.R.EnzymeTidTargetDictionary appropriately.
// AddEnzymeTidMetabolisms attaches the given Metabolism rows to o via the
// enzyme_tid FK, inserting them first when insert is true, and keeps both
// sides' R structs in sync.
func (o *TargetDictionary) AddEnzymeTidMetabolisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Metabolism) error {
	var err error
	for _, rel := range related {
		if insert {
			// New child: point its FK at o before inserting.
			queries.Assign(&rel.EnzymeTid, o.Tid)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing child: re-point its FK in the DB, then in memory.
			updateQuery := fmt.Sprintf(
				"UPDATE \"metabolism\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"enzyme_tid"}),
				strmangle.WhereClause("\"", "\"", 0, metabolismPrimaryKeyColumns),
			)
			values := []interface{}{o.Tid, rel.MetID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.EnzymeTid, o.Tid)
		}
	}

	if o.R == nil {
		o.R = &targetDictionaryR{
			EnzymeTidMetabolisms: related,
		}
	} else {
		o.R.EnzymeTidMetabolisms = append(o.R.EnzymeTidMetabolisms, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &metabolismR{
				EnzymeTidTargetDictionary: o,
			}
		} else {
			rel.R.EnzymeTidTargetDictionary = o
		}
	}
	return nil
}

// SetEnzymeTidMetabolisms removes all previously related items of the
// target_dictionary replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.EnzymeTidTargetDictionary's EnzymeTidMetabolisms accordingly.
// Replaces o.R.EnzymeTidMetabolisms with related.
// Sets related.R.EnzymeTidTargetDictionary's EnzymeTidMetabolisms accordingly.
func (o *TargetDictionary) SetEnzymeTidMetabolisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Metabolism) error {
	query := "update \"metabolism\" set \"enzyme_tid\" = null where \"enzyme_tid\" = ?"
	values := []interface{}{o.Tid}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	if o.R != nil {
		// Mirror the DB detach on the in-memory objects.
		for _, rel := range o.R.EnzymeTidMetabolisms {
			queries.SetScanner(&rel.EnzymeTid, nil)
			if rel.R == nil {
				continue
			}

			rel.R.EnzymeTidTargetDictionary = nil
		}
		o.R.EnzymeTidMetabolisms = nil
	}

	return o.AddEnzymeTidMetabolisms(ctx, exec, insert, related...)
}

// RemoveEnzymeTidMetabolisms relationships from objects passed in.
// Removes related items from R.EnzymeTidMetabolisms (uses pointer comparison, removal does not keep order)
// Sets related.R.EnzymeTidTargetDictionary.
func (o *TargetDictionary) RemoveEnzymeTidMetabolisms(ctx context.Context, exec boil.ContextExecutor, related ...*Metabolism) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	for _, rel := range related {
		queries.SetScanner(&rel.EnzymeTid, nil)
		if rel.R != nil {
			rel.R.EnzymeTidTargetDictionary = nil
		}
		if _, err = rel.Update(ctx, exec, boil.Whitelist("enzyme_tid")); err != nil {
			return err
		}
	}
	if o.R == nil {
		return nil
	}

	for _, rel := range related {
		for i, ri := range o.R.EnzymeTidMetabolisms {
			if rel != ri {
				continue
			}

			// Swap-delete: move the last element into slot i, then truncate.
			ln := len(o.R.EnzymeTidMetabolisms)
			if ln > 1 && i < ln-1 {
				o.R.EnzymeTidMetabolisms[i] = o.R.EnzymeTidMetabolisms[ln-1]
			}
			o.R.EnzymeTidMetabolisms = o.R.EnzymeTidMetabolisms[:ln-1]
			break
		}
	}

	return nil
}

// AddTidTargetComponents adds the given related objects to the existing relationships
// of the target_dictionary, optionally inserting them as new records.
// Appends related to o.R.TidTargetComponents.
// Sets related.R.TidTargetDictionary appropriately.
// AddTidTargetComponents attaches the given TargetComponent rows to o,
// inserting them first when insert is true, and keeps both sides' R structs
// in sync. The tid column here is a plain assignment (non-nullable FK).
func (o *TargetDictionary) AddTidTargetComponents(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*TargetComponent) error {
	var err error
	for _, rel := range related {
		if insert {
			rel.Tid = o.Tid
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing child: re-point its FK in the DB, then in memory.
			updateQuery := fmt.Sprintf(
				"UPDATE \"target_components\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"tid"}),
				strmangle.WhereClause("\"", "\"", 0, targetComponentPrimaryKeyColumns),
			)
			values := []interface{}{o.Tid, rel.TargcompID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.Tid = o.Tid
		}
	}

	if o.R == nil {
		o.R = &targetDictionaryR{
			TidTargetComponents: related,
		}
	} else {
		o.R.TidTargetComponents = append(o.R.TidTargetComponents, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &targetComponentR{
				TidTargetDictionary: o,
			}
		} else {
			rel.R.TidTargetDictionary = o
		}
	}
	return nil
}

// AddTidTargetRelations adds the given related objects to the existing relationships
// of the target_dictionary, optionally inserting them as new records.
// Appends related to o.R.TidTargetRelations.
// Sets related.R.TidTargetDictionary appropriately.
// AddTidTargetRelations attaches the given TargetRelation rows to o via the
// tid column, inserting them first when insert is true, and keeps both
// sides' R structs in sync.
func (o *TargetDictionary) AddTidTargetRelations(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*TargetRelation) error {
	var err error
	for _, rel := range related {
		if insert {
			rel.Tid = o.Tid
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing child: re-point its FK in the DB, then in memory.
			updateQuery := fmt.Sprintf(
				"UPDATE \"target_relations\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"tid"}),
				strmangle.WhereClause("\"", "\"", 0, targetRelationPrimaryKeyColumns),
			)
			values := []interface{}{o.Tid, rel.TargrelID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.Tid = o.Tid
		}
	}

	if o.R == nil {
		o.R = &targetDictionaryR{
			TidTargetRelations: related,
		}
	} else {
		o.R.TidTargetRelations = append(o.R.TidTargetRelations, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &targetRelationR{
				TidTargetDictionary: o,
			}
		} else {
			rel.R.TidTargetDictionary = o
		}
	}
	return nil
}

// AddRelatedTidTargetRelations adds the given related objects to the existing relationships
// of the target_dictionary, optionally inserting them as new records.
// Appends related to o.R.RelatedTidTargetRelations.
// Sets related.R.RelatedTidTargetDictionary appropriately.
// AddRelatedTidTargetRelations attaches the given TargetRelation rows to o via
// the related_tid column, inserting them first when insert is true, and keeps
// both sides' R structs in sync.
func (o *TargetDictionary) AddRelatedTidTargetRelations(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*TargetRelation) error {
	var err error
	for _, rel := range related {
		if insert {
			rel.RelatedTid = o.Tid
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing child: re-point its FK in the DB, then in memory.
			updateQuery := fmt.Sprintf(
				"UPDATE \"target_relations\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"related_tid"}),
				strmangle.WhereClause("\"", "\"", 0, targetRelationPrimaryKeyColumns),
			)
			values := []interface{}{o.Tid, rel.TargrelID}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			rel.RelatedTid = o.Tid
		}
	}

	if o.R == nil {
		o.R = &targetDictionaryR{
			RelatedTidTargetRelations: related,
		}
	} else {
		o.R.RelatedTidTargetRelations = append(o.R.RelatedTidTargetRelations, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &targetRelationR{
				RelatedTidTargetDictionary: o,
			}
		} else {
			rel.R.RelatedTidTargetDictionary = o
		}
	}
	return nil
}

// TargetDictionaries retrieves all the records using an executor.
func TargetDictionaries(mods ...qm.QueryMod) targetDictionaryQuery {
	mods = append(mods, qm.From("\"target_dictionary\""))
	q := NewQuery(mods...)
	// Default the projection to the whole table when no SELECT was supplied.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"target_dictionary\".*"})
	}

	return targetDictionaryQuery{q}
}

// FindTargetDictionary retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
// FindTargetDictionary retrieves the single target_dictionary row with the
// given primary key tid, selecting only selectCols when provided.
// Returns sql.ErrNoRows (unwrapped) when no row matches.
func FindTargetDictionary(ctx context.Context, exec boil.ContextExecutor, tid int64, selectCols ...string) (*TargetDictionary, error) {
	targetDictionaryObj := &TargetDictionary{}

	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"target_dictionary\" where \"tid\"=?", sel,
	)

	q := queries.Raw(query, tid)

	err := q.Bind(ctx, exec, targetDictionaryObj)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			// Surface the bare sentinel so callers can compare against it.
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from target_dictionary")
	}

	if err = targetDictionaryObj.doAfterSelectHooks(ctx, exec); err != nil {
		return targetDictionaryObj, err
	}

	return targetDictionaryObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *TargetDictionary) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no target_dictionary provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(targetDictionaryColumnsWithDefault, o)

	// Query text and bind mappings are cached per (columns, nzDefaults) key.
	key := makeCacheKey(columns, nzDefaults)
	targetDictionaryInsertCacheMut.RLock()
	cache, cached := targetDictionaryInsertCache[key]
	targetDictionaryInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			targetDictionaryAllColumns,
			targetDictionaryColumnsWithDefault,
			targetDictionaryColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(targetDictionaryType, targetDictionaryMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(targetDictionaryType, targetDictionaryMapping, returnColumns)
		if err != nil {
			return err
		}
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"target_dictionary\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"target_dictionary\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	// Use QueryRow only when RETURNING columns must be scanned back.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into target_dictionary")
	}

	if !cached {
		targetDictionaryInsertCacheMut.Lock()
		targetDictionaryInsertCache[key] = cache
		targetDictionaryInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the TargetDictionary.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *TargetDictionary) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	// The UPDATE statement and bind mapping are cached per column set.
	key := makeCacheKey(columns, nil)
	targetDictionaryUpdateCacheMut.RLock()
	cache, cached := targetDictionaryUpdateCache[key]
	targetDictionaryUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			targetDictionaryAllColumns,
			targetDictionaryPrimaryKeyColumns,
		)

		// created_at is never updated implicitly; only an explicit whitelist may touch it.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update target_dictionary, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"target_dictionary\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, targetDictionaryPrimaryKeyColumns),
		)
		// Primary key values are appended after the SET values for the WHERE clause.
		cache.valueMapping, err = queries.BindMapping(targetDictionaryType, targetDictionaryMapping, append(wl, targetDictionaryPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update target_dictionary row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for target_dictionary")
	}

	if !cached {
		targetDictionaryUpdateCacheMut.Lock()
		targetDictionaryUpdateCache[key] = cache
		targetDictionaryUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q targetDictionaryQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	// Rewrite the built query into an UPDATE with the given column/value map.
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for target_dictionary")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for target_dictionary")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o TargetDictionarySlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), targetDictionaryPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"target_dictionary\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, targetDictionaryPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in targetDictionary slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all targetDictionary")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *TargetDictionary) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no target_dictionary provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(targetDictionaryColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	targetDictionaryUpsertCacheMut.RLock()
	cache, cached := targetDictionaryUpsertCache[key]
	targetDictionaryUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			targetDictionaryAllColumns,
			targetDictionaryColumnsWithDefault,
			targetDictionaryColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			targetDictionaryAllColumns,
			targetDictionaryPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert target_dictionary, could not build update column list")
		}

		// Default the conflict target to the primary key when none was given.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(targetDictionaryPrimaryKeyColumns))
			copy(conflict, targetDictionaryPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"target_dictionary\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(targetDictionaryType, targetDictionaryMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(targetDictionaryType, targetDictionaryMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert target_dictionary")
	}

	if !cached {
		targetDictionaryUpsertCacheMut.Lock()
		targetDictionaryUpsertCache[key] = cache
		targetDictionaryUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single TargetDictionary record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *TargetDictionary) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no TargetDictionary provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	// Bind the primary key value(s) of this struct for the WHERE clause.
	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), targetDictionaryPrimaryKeyMapping)
	sql := "DELETE FROM \"target_dictionary\" WHERE \"tid\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from target_dictionary")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for target_dictionary")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q targetDictionaryQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no targetDictionaryQuery provided for delete all")
	}

	// Rewrite the built query into a DELETE before executing.
	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from target_dictionary")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for target_dictionary")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o TargetDictionarySlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Hooks are only iterated when at least one is registered, to avoid
	// per-object overhead in the common hook-free case.
	if len(targetDictionaryBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), targetDictionaryPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"target_dictionary\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, targetDictionaryPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from targetDictionary slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for target_dictionary")
	}

	if len(targetDictionaryAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *TargetDictionary) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindTargetDictionary(ctx, exec, o.Tid)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place with the freshly-fetched row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *TargetDictionarySlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := TargetDictionarySlice{}
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), targetDictionaryPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"target_dictionary\".* FROM \"target_dictionary\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, targetDictionaryPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in TargetDictionarySlice")
	}

	// NOTE: the refreshed slice may be shorter than the original if rows were
	// deleted in the meantime; the original slice is replaced wholesale.
	*o = slice

	return nil
}

// TargetDictionaryExists checks if the TargetDictionary row exists.
func TargetDictionaryExists(ctx context.Context, exec boil.ContextExecutor, tid int64) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"target_dictionary\" where \"tid\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, tid)
	}
	row := exec.QueryRowContext(ctx, sql, tid)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if target_dictionary exists")
	}

	return exists, nil
}
diff --git a/models/target_relations.go b/models/target_relations.go
new file mode 100644
index 0000000..faf8bb6
--- /dev/null
+++ b/models/target_relations.go
@@ -0,0 +1,1248 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// TargetRelation is an object representing the database table.
type TargetRelation struct {
	Tid          int64  `boil:"tid" json:"tid" toml:"tid" yaml:"tid"`
	Relationship string `boil:"relationship" json:"relationship" toml:"relationship" yaml:"relationship"`
	RelatedTid   int64  `boil:"related_tid" json:"related_tid" toml:"related_tid" yaml:"related_tid"`
	TargrelID    int64  `boil:"targrel_id" json:"targrel_id" toml:"targrel_id" yaml:"targrel_id"`

	// R holds eager-loaded relations; L holds the Load methods. Both are
	// excluded from serialization and database binding.
	R *targetRelationR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L targetRelationL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// TargetRelationColumns maps struct field names to bare column names.
var TargetRelationColumns = struct {
	Tid          string
	Relationship string
	RelatedTid   string
	TargrelID    string
}{
	Tid:          "tid",
	Relationship: "relationship",
	RelatedTid:   "related_tid",
	TargrelID:    "targrel_id",
}

// TargetRelationTableColumns maps struct field names to table-qualified column names.
var TargetRelationTableColumns = struct {
	Tid          string
	Relationship string
	RelatedTid   string
	TargrelID    string
}{
	Tid:          "target_relations.tid",
	Relationship: "target_relations.relationship",
	RelatedTid:   "target_relations.related_tid",
	TargrelID:    "target_relations.targrel_id",
}

// Generated where

var TargetRelationWhere = struct {
	Tid          whereHelperint64
	Relationship whereHelperstring
	RelatedTid   whereHelperint64
	TargrelID    whereHelperint64
}{
	Tid:          whereHelperint64{field: "\"target_relations\".\"tid\""},
	Relationship: whereHelperstring{field: "\"target_relations\".\"relationship\""},
	RelatedTid:   whereHelperint64{field: "\"target_relations\".\"related_tid\""},
	TargrelID:    whereHelperint64{field: "\"target_relations\".\"targrel_id\""},
}

// TargetRelationRels is where relationship names are stored.
var TargetRelationRels = struct {
	TidTargetDictionary        string
	RelatedTidTargetDictionary string
}{
	TidTargetDictionary:        "TidTargetDictionary",
	RelatedTidTargetDictionary: "RelatedTidTargetDictionary",
}

// targetRelationR is where relationships are stored.
type targetRelationR struct {
	TidTargetDictionary        *TargetDictionary `boil:"TidTargetDictionary" json:"TidTargetDictionary" toml:"TidTargetDictionary" yaml:"TidTargetDictionary"`
	RelatedTidTargetDictionary *TargetDictionary `boil:"RelatedTidTargetDictionary" json:"RelatedTidTargetDictionary" toml:"RelatedTidTargetDictionary" yaml:"RelatedTidTargetDictionary"`
}

// NewStruct creates a new relationship struct
func (*targetRelationR) NewStruct() *targetRelationR {
	return &targetRelationR{}
}

// GetTidTargetDictionary returns the eager-loaded tid relation, or nil when
// the receiver itself is nil (safe to call on an unloaded R).
func (r *targetRelationR) GetTidTargetDictionary() *TargetDictionary {
	if r == nil {
		return nil
	}
	return r.TidTargetDictionary
}

// GetRelatedTidTargetDictionary returns the eager-loaded related_tid relation,
// or nil when the receiver itself is nil.
func (r *targetRelationR) GetRelatedTidTargetDictionary() *TargetDictionary {
	if r == nil {
		return nil
	}
	return r.RelatedTidTargetDictionary
}

// targetRelationL is where Load methods for each relationship are stored.
type targetRelationL struct{}

var (
	targetRelationAllColumns            = []string{"tid", "relationship", "related_tid", "targrel_id"}
	targetRelationColumnsWithoutDefault = []string{"tid", "relationship", "related_tid", "targrel_id"}
	targetRelationColumnsWithDefault    = []string{}
	targetRelationPrimaryKeyColumns     = []string{"targrel_id"}
	targetRelationGeneratedColumns      = []string{}
)

type (
	// TargetRelationSlice is an alias for a slice of pointers to TargetRelation.
	// This should almost always be used instead of []TargetRelation.
	TargetRelationSlice []*TargetRelation
	// TargetRelationHook is the signature for custom TargetRelation hook methods
	TargetRelationHook func(context.Context, boil.ContextExecutor, *TargetRelation) error

	targetRelationQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	targetRelationType                 = reflect.TypeOf(&TargetRelation{})
	targetRelationMapping              = queries.MakeStructMapping(targetRelationType)
	targetRelationPrimaryKeyMapping, _ = queries.BindMapping(targetRelationType, targetRelationMapping, targetRelationPrimaryKeyColumns)
	targetRelationInsertCacheMut       sync.RWMutex
	targetRelationInsertCache          = make(map[string]insertCache)
	targetRelationUpdateCacheMut       sync.RWMutex
	targetRelationUpdateCache          = make(map[string]updateCache)
	targetRelationUpsertCacheMut       sync.RWMutex
	targetRelationUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Registered hook slices, one per hook point; appended to by AddTargetRelationHook.
var targetRelationAfterSelectHooks []TargetRelationHook

var targetRelationBeforeInsertHooks []TargetRelationHook
var targetRelationAfterInsertHooks []TargetRelationHook

var targetRelationBeforeUpdateHooks []TargetRelationHook
var targetRelationAfterUpdateHooks []TargetRelationHook

var targetRelationBeforeDeleteHooks []TargetRelationHook
var targetRelationAfterDeleteHooks []TargetRelationHook

var targetRelationBeforeUpsertHooks []TargetRelationHook
var targetRelationAfterUpsertHooks []TargetRelationHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *TargetRelation) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	// All hook runners honor boil.SkipHooks via the context.
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetRelationAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *TargetRelation) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetRelationBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *TargetRelation) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetRelationAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *TargetRelation) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetRelationBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *TargetRelation) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetRelationAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *TargetRelation) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetRelationBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *TargetRelation) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetRelationAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *TargetRelation) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetRelationBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *TargetRelation) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetRelationAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddTargetRelationHook registers your hook function for all future operations.
func AddTargetRelationHook(hookPoint boil.HookPoint, targetRelationHook TargetRelationHook) {
	// NOTE(review): registration appends to package-level slices without a
	// lock — per SQLBoiler convention hooks are registered once at startup.
	switch hookPoint {
	case boil.AfterSelectHook:
		targetRelationAfterSelectHooks = append(targetRelationAfterSelectHooks, targetRelationHook)
	case boil.BeforeInsertHook:
		targetRelationBeforeInsertHooks = append(targetRelationBeforeInsertHooks, targetRelationHook)
	case boil.AfterInsertHook:
		targetRelationAfterInsertHooks = append(targetRelationAfterInsertHooks, targetRelationHook)
	case boil.BeforeUpdateHook:
		targetRelationBeforeUpdateHooks = append(targetRelationBeforeUpdateHooks, targetRelationHook)
	case boil.AfterUpdateHook:
		targetRelationAfterUpdateHooks = append(targetRelationAfterUpdateHooks, targetRelationHook)
	case boil.BeforeDeleteHook:
		targetRelationBeforeDeleteHooks = append(targetRelationBeforeDeleteHooks, targetRelationHook)
	case boil.AfterDeleteHook:
		targetRelationAfterDeleteHooks = append(targetRelationAfterDeleteHooks, targetRelationHook)
	case boil.BeforeUpsertHook:
		targetRelationBeforeUpsertHooks = append(targetRelationBeforeUpsertHooks, targetRelationHook)
	case boil.AfterUpsertHook:
		targetRelationAfterUpsertHooks = append(targetRelationAfterUpsertHooks, targetRelationHook)
	}
}

// One returns a single targetRelation record from the query.
func (q targetRelationQuery) One(ctx context.Context, exec boil.ContextExecutor) (*TargetRelation, error) {
	o := &TargetRelation{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		// sql.ErrNoRows is returned unwrapped so callers can errors.Is on it.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for target_relations")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all TargetRelation records from the query.
func (q targetRelationQuery) All(ctx context.Context, exec boil.ContextExecutor) (TargetRelationSlice, error) {
	var o []*TargetRelation

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to TargetRelation slice")
	}

	// Only walk the slice when at least one after-select hook is registered.
	if len(targetRelationAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all TargetRelation records in the query.
func (q targetRelationQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count target_relations rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q targetRelationQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if target_relations exists")
	}

	return count > 0, nil
}

// TidTargetDictionary pointed to by the foreign key.
func (o *TargetRelation) TidTargetDictionary(mods ...qm.QueryMod) targetDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"tid\" = ?", o.Tid),
	}

	queryMods = append(queryMods, mods...)

	return TargetDictionaries(queryMods...)
}

// RelatedTidTargetDictionary pointed to by the foreign key.
func (o *TargetRelation) RelatedTidTargetDictionary(mods ...qm.QueryMod) targetDictionaryQuery {
	queryMods := []qm.QueryMod{
		qm.Where("\"tid\" = ?", o.RelatedTid),
	}

	queryMods = append(queryMods, mods...)

	return TargetDictionaries(queryMods...)
}

// LoadTidTargetDictionary allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (targetRelationL) LoadTidTargetDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTargetRelation interface{}, mods queries.Applicator) error {
	var slice []*TargetRelation
	var object *TargetRelation

	if singular {
		object = maybeTargetRelation.(*TargetRelation)
	} else {
		slice = *maybeTargetRelation.(*[]*TargetRelation)
	}

	// Collect the distinct foreign-key values to fetch in a single query.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &targetRelationR{}
		}
		args = append(args, object.Tid)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &targetRelationR{}
			}

			for _, a := range args {
				if a == obj.Tid {
					continue Outer
				}
			}

			args = append(args, obj.Tid)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`target_dictionary`),
		qm.WhereIn(`target_dictionary.tid in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load TargetDictionary")
	}

	var resultSlice []*TargetDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice TargetDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for target_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for target_dictionary")
	}

	if len(targetRelationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.TidTargetDictionary = foreign
		if foreign.R == nil {
			foreign.R = &targetDictionaryR{}
		}
		foreign.R.TidTargetRelations = append(foreign.R.TidTargetRelations, object)
		return nil
	}

	// Link each local row to its matching foreign row, wiring both sides of R.
	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.Tid == foreign.Tid {
				local.R.TidTargetDictionary = foreign
				if foreign.R == nil {
					foreign.R = &targetDictionaryR{}
				}
				foreign.R.TidTargetRelations = append(foreign.R.TidTargetRelations, local)
				break
			}
		}
	}

	return nil
}

// LoadRelatedTidTargetDictionary allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for an N-1 relationship.
func (targetRelationL) LoadRelatedTidTargetDictionary(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTargetRelation interface{}, mods queries.Applicator) error {
	var slice []*TargetRelation
	var object *TargetRelation

	if singular {
		object = maybeTargetRelation.(*TargetRelation)
	} else {
		slice = *maybeTargetRelation.(*[]*TargetRelation)
	}

	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &targetRelationR{}
		}
		args = append(args, object.RelatedTid)

	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &targetRelationR{}
			}

			for _, a := range args {
				if a == obj.RelatedTid {
					continue Outer
				}
			}

			args = append(args, obj.RelatedTid)

		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`target_dictionary`),
		qm.WhereIn(`target_dictionary.tid in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load TargetDictionary")
	}

	var resultSlice []*TargetDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice TargetDictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results of eager load for target_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for target_dictionary")
	}

	if len(targetRelationAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}

	if len(resultSlice) == 0 {
		return nil
	}

	if singular {
		foreign := resultSlice[0]
		object.R.RelatedTidTargetDictionary = foreign
		if foreign.R == nil {
			foreign.R = &targetDictionaryR{}
		}
		foreign.R.RelatedTidTargetRelations = append(foreign.R.RelatedTidTargetRelations, object)
		return nil
	}

	for _, local := range slice {
		for _, foreign := range resultSlice {
			if local.RelatedTid == foreign.Tid {
				local.R.RelatedTidTargetDictionary = foreign
				if foreign.R == nil {
					foreign.R = &targetDictionaryR{}
				}
				foreign.R.RelatedTidTargetRelations = append(foreign.R.RelatedTidTargetRelations, local)
				break
			}
		}
	}

	return nil
}

// SetTidTargetDictionary of the targetRelation to the related item.
// Sets o.R.TidTargetDictionary to related.
// Adds o to related.R.TidTargetRelations.
func (o *TargetRelation) SetTidTargetDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *TargetDictionary) error {
	var err error
	// Optionally insert the related row first so its key exists to point at.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	updateQuery := fmt.Sprintf(
		"UPDATE \"target_relations\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"tid"}),
		strmangle.WhereClause("\"", "\"", 0, targetRelationPrimaryKeyColumns),
	)
	values := []interface{}{related.Tid, o.TargrelID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Keep the in-memory struct and both sides of the relationship in sync.
	o.Tid = related.Tid
	if o.R == nil {
		o.R = &targetRelationR{
			TidTargetDictionary: related,
		}
	} else {
		o.R.TidTargetDictionary = related
	}

	if related.R == nil {
		related.R = &targetDictionaryR{
			TidTargetRelations: TargetRelationSlice{o},
		}
	} else {
		related.R.TidTargetRelations = append(related.R.TidTargetRelations, o)
	}

	return nil
}

// SetRelatedTidTargetDictionary of the targetRelation to the related item.
// Sets o.R.RelatedTidTargetDictionary to related.
// Adds o to related.R.RelatedTidTargetRelations.
func (o *TargetRelation) SetRelatedTidTargetDictionary(ctx context.Context, exec boil.ContextExecutor, insert bool, related *TargetDictionary) error {
	var err error
	// Optionally persist the related row first so its PK is valid for the FK update.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Re-point this row's "related_tid" foreign key at the related target_dictionary row.
	updateQuery := fmt.Sprintf(
		"UPDATE \"target_relations\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"related_tid"}),
		strmangle.WhereClause("\"", "\"", 0, targetRelationPrimaryKeyColumns),
	)
	values := []interface{}{related.Tid, o.TargrelID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the DB change on the in-memory structs (both directions of the relationship).
	o.RelatedTid = related.Tid
	if o.R == nil {
		o.R = &targetRelationR{
			RelatedTidTargetDictionary: related,
		}
	} else {
		o.R.RelatedTidTargetDictionary = related
	}

	if related.R == nil {
		related.R = &targetDictionaryR{
			RelatedTidTargetRelations: TargetRelationSlice{o},
		}
	} else {
		related.R.RelatedTidTargetRelations = append(related.R.RelatedTidTargetRelations, o)
	}

	return nil
}

// TargetRelations retrieves all the records using an executor.
func TargetRelations(mods ...qm.QueryMod) targetRelationQuery {
	mods = append(mods, qm.From("\"target_relations\""))
	q := NewQuery(mods...)
	// Default to selecting every column unless the caller supplied an explicit select.
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"target_relations\".*"})
	}

	return targetRelationQuery{q}
}

// FindTargetRelation retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindTargetRelation(ctx context.Context, exec boil.ContextExecutor, targrelID int64, selectCols ...string) (*TargetRelation, error) {
	targetRelationObj := &TargetRelation{}

	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	// "?" placeholder: this model file is generated for the SQLite dialect
	// (see buildUpsertQuerySQLite in Upsert).
	query := fmt.Sprintf(
		"select %s from \"target_relations\" where \"targrel_id\"=?", sel,
	)

	q := queries.Raw(query, targrelID)

	err := q.Bind(ctx, exec, targetRelationObj)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: unable to select from target_relations")
	}

	if err = targetRelationObj.doAfterSelectHooks(ctx, exec); err != nil {
		return targetRelationObj, err
	}

	return targetRelationObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
func (o *TargetRelation) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
	if o == nil {
		return errors.New("models: no target_relations provided for insertion")
	}

	var err error

	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(targetRelationColumnsWithDefault, o)

	// The built INSERT statement and its struct-field mappings are cached per
	// (column set, non-zero defaults) key to avoid rebuilding them on every call.
	key := makeCacheKey(columns, nzDefaults)
	targetRelationInsertCacheMut.RLock()
	cache, cached := targetRelationInsertCache[key]
	targetRelationInsertCacheMut.RUnlock()

	if !cached {
		wl, returnColumns := columns.InsertColumnSet(
			targetRelationAllColumns,
			targetRelationColumnsWithDefault,
			targetRelationColumnsWithoutDefault,
			nzDefaults,
		)

		cache.valueMapping, err = queries.BindMapping(targetRelationType, targetRelationMapping, wl)
		if err != nil {
			return err
		}
		cache.retMapping, err = queries.BindMapping(targetRelationType, targetRelationMapping, returnColumns)
		if err != nil {
			return err
		}
		// %%s placeholders are filled below with optional OUTPUT/RETURNING fragments.
		if len(wl) != 0 {
			cache.query = fmt.Sprintf("INSERT INTO \"target_relations\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
		} else {
			cache.query = "INSERT INTO \"target_relations\" %sDEFAULT VALUES%s"
		}

		var queryOutput, queryReturning string

		if len(cache.retMapping) != 0 {
			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
		}

		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}

	// When columns are returned (e.g. DB-generated defaults), scan them back into o.
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}

	if err != nil {
		return errors.Wrap(err, "models: unable to insert into target_relations")
	}

	if !cached {
		targetRelationInsertCacheMut.Lock()
		targetRelationInsertCache[key] = cache
		targetRelationInsertCacheMut.Unlock()
	}

	return o.doAfterInsertHooks(ctx, exec)
}

// Update uses an executor to update the TargetRelation.
// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
func (o *TargetRelation) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	// The built UPDATE statement and its struct-field mapping are cached per column set.
	key := makeCacheKey(columns, nil)
	targetRelationUpdateCacheMut.RLock()
	cache, cached := targetRelationUpdateCache[key]
	targetRelationUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			targetRelationAllColumns,
			targetRelationPrimaryKeyColumns,
		)

		// "created_at" is excluded from implicit updates; a whitelist can still include it.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update target_relations, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"target_relations\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, targetRelationPrimaryKeyColumns),
		)
		// Bind both the SET values and the trailing primary-key WHERE values.
		cache.valueMapping, err = queries.BindMapping(targetRelationType, targetRelationMapping, append(wl, targetRelationPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update target_relations row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for target_relations")
	}

	if !cached {
		targetRelationUpdateCacheMut.Lock()
		targetRelationUpdateCache[key] = cache
		targetRelationUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q targetRelationQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for target_relations")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for target_relations")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o TargetRelationSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	// Flatten the cols map into parallel name/value lists for SET-clause construction.
	// NOTE(review): map iteration order is random, so the generated SQL's column order
	// varies between calls; names and values stay paired, so behavior is unaffected.
	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), targetRelationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := fmt.Sprintf("UPDATE \"target_relations\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, targetRelationPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in targetRelation slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all targetRelation")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *TargetRelation) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no target_relations provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(targetRelationColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	// (the key encodes every input that can change the generated statement).
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	targetRelationUpsertCacheMut.RLock()
	cache, cached := targetRelationUpsertCache[key]
	targetRelationUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			targetRelationAllColumns,
			targetRelationColumnsWithDefault,
			targetRelationColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			targetRelationAllColumns,
			targetRelationPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert target_relations, could not build update column list")
		}

		// Default the conflict target to the primary key columns.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(targetRelationPrimaryKeyColumns))
			copy(conflict, targetRelationPrimaryKeyColumns)
		}
		cache.query = buildUpsertQuerySQLite(dialect, "\"target_relations\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(targetRelationType, targetRelationMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(targetRelationType, targetRelationMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			err = nil // Postgres doesn't return anything when there's no update
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert target_relations")
	}

	if !cached {
		targetRelationUpsertCacheMut.Lock()
		targetRelationUpsertCache[key] = cache
		targetRelationUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single TargetRelation record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *TargetRelation) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no TargetRelation provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	// Delete by primary key only; other field values on o are ignored.
	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), targetRelationPrimaryKeyMapping)
	sql := "DELETE FROM \"target_relations\" WHERE \"targrel_id\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from target_relations")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for target_relations")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
func (q targetRelationQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no targetRelationQuery provided for delete all")
	}

	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from target_relations")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for target_relations")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o TargetRelationSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Hook loops are skipped entirely when no hooks are registered.
	if len(targetRelationBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// Collect every row's primary-key values for a single multi-row DELETE.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), targetRelationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "DELETE FROM \"target_relations\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, targetRelationPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from targetRelation slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for target_relations")
	}

	if len(targetRelationAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
func (o *TargetRelation) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindTargetRelation(ctx, exec, o.TargrelID)
	if err != nil {
		return err
	}

	// Overwrite the receiver in place with the freshly loaded row.
	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
func (o *TargetRelationSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error {
	if o == nil || len(*o) == 0 {
		return nil
	}

	slice := TargetRelationSlice{}
	// Collect every row's primary-key values for a single multi-row SELECT.
	var args []interface{}
	for _, obj := range *o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), targetRelationPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	sql := "SELECT \"target_relations\".* FROM \"target_relations\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, targetRelationPrimaryKeyColumns, len(*o))

	q := queries.Raw(sql, args...)

	err := q.Bind(ctx, exec, &slice)
	if err != nil {
		return errors.Wrap(err, "models: unable to reload all in TargetRelationSlice")
	}

	// Replace the caller's slice wholesale with the reloaded rows.
	*o = slice

	return nil
}

// TargetRelationExists checks if the TargetRelation row exists.
func TargetRelationExists(ctx context.Context, exec boil.ContextExecutor, targrelID int64) (bool, error) {
	var exists bool
	sql := "select exists(select 1 from \"target_relations\" where \"targrel_id\"=? limit 1)"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, targrelID)
	}
	row := exec.QueryRowContext(ctx, sql, targrelID)

	err := row.Scan(&exists)
	if err != nil {
		return false, errors.Wrap(err, "models: unable to check if target_relations exists")
	}

	return exists, nil
}
diff --git a/models/target_type.go b/models/target_type.go
new file mode 100644
index 0000000..45ce60a
--- /dev/null
+++ b/models/target_type.go
@@ -0,0 +1,1147 @@
// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.

package models

import (
	"context"
	"database/sql"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/friendsofgo/errors"
	"github.com/volatiletech/null/v8"
	"github.com/volatiletech/sqlboiler/v4/boil"
	"github.com/volatiletech/sqlboiler/v4/queries"
	"github.com/volatiletech/sqlboiler/v4/queries/qm"
	"github.com/volatiletech/sqlboiler/v4/queries/qmhelper"
	"github.com/volatiletech/strmangle"
)

// TargetType is an object representing the database table.
type TargetType struct {
	// TargetType is the primary key (see targetTypePrimaryKeyColumns below).
	TargetType string      `boil:"target_type" json:"target_type" toml:"target_type" yaml:"target_type"`
	TargetDesc null.String `boil:"target_desc" json:"target_desc,omitempty" toml:"target_desc" yaml:"target_desc,omitempty"`
	ParentType null.String `boil:"parent_type" json:"parent_type,omitempty" toml:"parent_type" yaml:"parent_type,omitempty"`

	// R holds eager-loaded relationships; L holds the relationship Load methods.
	R *targetTypeR `boil:"-" json:"-" toml:"-" yaml:"-"`
	L targetTypeL  `boil:"-" json:"-" toml:"-" yaml:"-"`
}

// TargetTypeColumns maps struct field names to bare column names.
var TargetTypeColumns = struct {
	TargetType string
	TargetDesc string
	ParentType string
}{
	TargetType: "target_type",
	TargetDesc: "target_desc",
	ParentType: "parent_type",
}

// TargetTypeTableColumns maps struct field names to table-qualified column names.
var TargetTypeTableColumns = struct {
	TargetType string
	TargetDesc string
	ParentType string
}{
	TargetType: "target_type.target_type",
	TargetDesc: "target_type.target_desc",
	ParentType: "target_type.parent_type",
}

// Generated where

var TargetTypeWhere = struct {
	TargetType whereHelperstring
	TargetDesc whereHelpernull_String
	ParentType whereHelpernull_String
}{
	TargetType: whereHelperstring{field: "\"target_type\".\"target_type\""},
	TargetDesc: whereHelpernull_String{field: "\"target_type\".\"target_desc\""},
	ParentType: whereHelpernull_String{field: "\"target_type\".\"parent_type\""},
}

// TargetTypeRels is where relationship names are stored.
var TargetTypeRels = struct {
	TargetDictionaries string
}{
	TargetDictionaries: "TargetDictionaries",
}

// targetTypeR is where relationships are stored.
type targetTypeR struct {
	TargetDictionaries TargetDictionarySlice `boil:"TargetDictionaries" json:"TargetDictionaries" toml:"TargetDictionaries" yaml:"TargetDictionaries"`
}

// NewStruct creates a new relationship struct
func (*targetTypeR) NewStruct() *targetTypeR {
	return &targetTypeR{}
}

// GetTargetDictionaries returns the loaded TargetDictionaries relationship,
// or nil when the receiver itself is nil (safe on unloaded structs).
func (r *targetTypeR) GetTargetDictionaries() TargetDictionarySlice {
	if r == nil {
		return nil
	}
	return r.TargetDictionaries
}

// targetTypeL is where Load methods for each relationship are stored.
type targetTypeL struct{}

var (
	targetTypeAllColumns            = []string{"target_type", "target_desc", "parent_type"}
	targetTypeColumnsWithoutDefault = []string{"target_type"}
	targetTypeColumnsWithDefault    = []string{"target_desc", "parent_type"}
	targetTypePrimaryKeyColumns     = []string{"target_type"}
	targetTypeGeneratedColumns      = []string{}
)

type (
	// TargetTypeSlice is an alias for a slice of pointers to TargetType.
	// This should almost always be used instead of []TargetType.
	TargetTypeSlice []*TargetType
	// TargetTypeHook is the signature for custom TargetType hook methods
	TargetTypeHook func(context.Context, boil.ContextExecutor, *TargetType) error

	targetTypeQuery struct {
		*queries.Query
	}
)

// Cache for insert, update and upsert
var (
	targetTypeType                 = reflect.TypeOf(&TargetType{})
	targetTypeMapping              = queries.MakeStructMapping(targetTypeType)
	targetTypePrimaryKeyMapping, _ = queries.BindMapping(targetTypeType, targetTypeMapping, targetTypePrimaryKeyColumns)
	targetTypeInsertCacheMut       sync.RWMutex
	targetTypeInsertCache          = make(map[string]insertCache)
	targetTypeUpdateCacheMut       sync.RWMutex
	targetTypeUpdateCache          = make(map[string]updateCache)
	targetTypeUpsertCacheMut       sync.RWMutex
	targetTypeUpsertCache          = make(map[string]insertCache)
)

var (
	// Force time package dependency for automated UpdatedAt/CreatedAt.
	_ = time.Second
	// Force qmhelper dependency for where clause generation (which doesn't
	// always happen)
	_ = qmhelper.Where
)

// Per-hook-point registries; populated via AddTargetTypeHook.
var targetTypeAfterSelectHooks []TargetTypeHook

var targetTypeBeforeInsertHooks []TargetTypeHook
var targetTypeAfterInsertHooks []TargetTypeHook

var targetTypeBeforeUpdateHooks []TargetTypeHook
var targetTypeAfterUpdateHooks []TargetTypeHook

var targetTypeBeforeDeleteHooks []TargetTypeHook
var targetTypeAfterDeleteHooks []TargetTypeHook

var targetTypeBeforeUpsertHooks []TargetTypeHook
var targetTypeAfterUpsertHooks []TargetTypeHook

// doAfterSelectHooks executes all "after Select" hooks.
func (o *TargetType) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetTypeAfterSelectHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeInsertHooks executes all "before insert" hooks.
func (o *TargetType) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetTypeBeforeInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterInsertHooks executes all "after Insert" hooks.
func (o *TargetType) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetTypeAfterInsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpdateHooks executes all "before Update" hooks.
func (o *TargetType) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetTypeBeforeUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpdateHooks executes all "after Update" hooks.
func (o *TargetType) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetTypeAfterUpdateHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeDeleteHooks executes all "before Delete" hooks.
func (o *TargetType) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetTypeBeforeDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterDeleteHooks executes all "after Delete" hooks.
func (o *TargetType) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetTypeAfterDeleteHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doBeforeUpsertHooks executes all "before Upsert" hooks.
func (o *TargetType) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetTypeBeforeUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// doAfterUpsertHooks executes all "after Upsert" hooks.
func (o *TargetType) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
	if boil.HooksAreSkipped(ctx) {
		return nil
	}

	for _, hook := range targetTypeAfterUpsertHooks {
		if err := hook(ctx, exec, o); err != nil {
			return err
		}
	}

	return nil
}

// AddTargetTypeHook registers your hook function for all future operations.
func AddTargetTypeHook(hookPoint boil.HookPoint, targetTypeHook TargetTypeHook) {
	// Dispatch to the matching package-level hook registry; unknown hook
	// points are silently ignored (no default case).
	switch hookPoint {
	case boil.AfterSelectHook:
		targetTypeAfterSelectHooks = append(targetTypeAfterSelectHooks, targetTypeHook)
	case boil.BeforeInsertHook:
		targetTypeBeforeInsertHooks = append(targetTypeBeforeInsertHooks, targetTypeHook)
	case boil.AfterInsertHook:
		targetTypeAfterInsertHooks = append(targetTypeAfterInsertHooks, targetTypeHook)
	case boil.BeforeUpdateHook:
		targetTypeBeforeUpdateHooks = append(targetTypeBeforeUpdateHooks, targetTypeHook)
	case boil.AfterUpdateHook:
		targetTypeAfterUpdateHooks = append(targetTypeAfterUpdateHooks, targetTypeHook)
	case boil.BeforeDeleteHook:
		targetTypeBeforeDeleteHooks = append(targetTypeBeforeDeleteHooks, targetTypeHook)
	case boil.AfterDeleteHook:
		targetTypeAfterDeleteHooks = append(targetTypeAfterDeleteHooks, targetTypeHook)
	case boil.BeforeUpsertHook:
		targetTypeBeforeUpsertHooks = append(targetTypeBeforeUpsertHooks, targetTypeHook)
	case boil.AfterUpsertHook:
		targetTypeAfterUpsertHooks = append(targetTypeAfterUpsertHooks, targetTypeHook)
	}
}

// One returns a single targetType record from the query.
func (q targetTypeQuery) One(ctx context.Context, exec boil.ContextExecutor) (*TargetType, error) {
	o := &TargetType{}

	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for target_type")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all TargetType records from the query.
func (q targetTypeQuery) All(ctx context.Context, exec boil.ContextExecutor) (TargetTypeSlice, error) {
	var o []*TargetType

	err := q.Bind(ctx, exec, &o)
	if err != nil {
		return nil, errors.Wrap(err, "models: failed to assign all query results to TargetType slice")
	}

	// Hook loop is skipped entirely when no after-select hooks are registered.
	if len(targetTypeAfterSelectHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
				return o, err
			}
		}
	}

	return o, nil
}

// Count returns the count of all TargetType records in the query.
func (q targetTypeQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	var count int64

	// Replace any user select with a COUNT aggregate.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to count target_type rows")
	}

	return count, nil
}

// Exists checks if the row exists in the table.
func (q targetTypeQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
	var count int64

	// COUNT with LIMIT 1 — the DB can stop at the first matching row.
	queries.SetSelect(q.Query, nil)
	queries.SetCount(q.Query)
	queries.SetLimit(q.Query, 1)

	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
	if err != nil {
		return false, errors.Wrap(err, "models: failed to check if target_type exists")
	}

	return count > 0, nil
}

// TargetDictionaries retrieves all the target_dictionary's TargetDictionaries with an executor.
func (o *TargetType) TargetDictionaries(mods ...qm.QueryMod) targetDictionaryQuery {
	var queryMods []qm.QueryMod
	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("\"target_dictionary\".\"target_type\"=?", o.TargetType),
	)

	return TargetDictionaries(queryMods...)
}

// LoadTargetDictionaries allows an eager lookup of values, cached into the
// loaded structs of the objects. This is for a 1-M or N-M relationship.
func (targetTypeL) LoadTargetDictionaries(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTargetType interface{}, mods queries.Applicator) error {
	var slice []*TargetType
	var object *TargetType

	// maybeTargetType is either a single *TargetType or a *[]*TargetType,
	// depending on how the caller invoked the load.
	if singular {
		object = maybeTargetType.(*TargetType)
	} else {
		slice = *maybeTargetType.(*[]*TargetType)
	}

	// Collect distinct target_type keys to query in one IN clause.
	args := make([]interface{}, 0, 1)
	if singular {
		if object.R == nil {
			object.R = &targetTypeR{}
		}
		args = append(args, object.TargetType)
	} else {
	Outer:
		for _, obj := range slice {
			if obj.R == nil {
				obj.R = &targetTypeR{}
			}

			for _, a := range args {
				if queries.Equal(a, obj.TargetType) {
					continue Outer
				}
			}

			args = append(args, obj.TargetType)
		}
	}

	if len(args) == 0 {
		return nil
	}

	query := NewQuery(
		qm.From(`target_dictionary`),
		qm.WhereIn(`target_dictionary.target_type in ?`, args...),
	)
	if mods != nil {
		mods.Apply(query)
	}

	results, err := query.QueryContext(ctx, e)
	if err != nil {
		return errors.Wrap(err, "failed to eager load target_dictionary")
	}

	var resultSlice []*TargetDictionary
	if err = queries.Bind(results, &resultSlice); err != nil {
		return errors.Wrap(err, "failed to bind eager loaded slice target_dictionary")
	}

	if err = results.Close(); err != nil {
		return errors.Wrap(err, "failed to close results in eager load on target_dictionary")
	}
	if err = results.Err(); err != nil {
		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for target_dictionary")
	}

	if len(targetDictionaryAfterSelectHooks) != 0 {
		for _, obj := range resultSlice {
			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
				return err
			}
		}
	}
	if singular {
		object.R.TargetDictionaries = resultSlice
		for _, foreign := range resultSlice {
			if foreign.R == nil {
				foreign.R = &targetDictionaryR{}
			}
			foreign.R.TargetDictionaryTargetType = object
		}
		return nil
	}

	// Distribute each fetched foreign row onto its matching local parent.
	for _, foreign := range resultSlice {
		for _, local := range slice {
			if queries.Equal(local.TargetType, foreign.TargetType) {
				local.R.TargetDictionaries = append(local.R.TargetDictionaries, foreign)
				if foreign.R == nil {
					foreign.R = &targetDictionaryR{}
				}
				foreign.R.TargetDictionaryTargetType = local
				break
			}
		}
	}

	return nil
}

// AddTargetDictionaries adds the given related objects to the existing relationships
// of the target_type, optionally inserting them as new records.
// Appends related to o.R.TargetDictionaries.
// Sets related.R.TargetDictionaryTargetType appropriately.
func (o *TargetType) AddTargetDictionaries(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*TargetDictionary) error {
	var err error
	for _, rel := range related {
		if insert {
			// Stamp the FK before inserting the new related row.
			queries.Assign(&rel.TargetType, o.TargetType)
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else {
			// Existing row: update its FK column in the database.
			updateQuery := fmt.Sprintf(
				"UPDATE \"target_dictionary\" SET %s WHERE %s",
				strmangle.SetParamNames("\"", "\"", 0, []string{"target_type"}),
				strmangle.WhereClause("\"", "\"", 0, targetDictionaryPrimaryKeyColumns),
			)
			values := []interface{}{o.TargetType, rel.Tid}

			if boil.IsDebug(ctx) {
				writer := boil.DebugWriterFrom(ctx)
				fmt.Fprintln(writer, updateQuery)
				fmt.Fprintln(writer, values)
			}
			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
				return errors.Wrap(err, "failed to update foreign table")
			}

			queries.Assign(&rel.TargetType, o.TargetType)
		}
	}

	if o.R == nil {
		o.R = &targetTypeR{
			TargetDictionaries: related,
		}
	} else {
		o.R.TargetDictionaries = append(o.R.TargetDictionaries, related...)
	}

	// Back-link each related row to this parent.
	for _, rel := range related {
		if rel.R == nil {
			rel.R = &targetDictionaryR{
				TargetDictionaryTargetType: o,
			}
		} else {
			rel.R.TargetDictionaryTargetType = o
		}
	}
	return nil
}

// SetTargetDictionaries removes all previously related items of the
// target_type replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.TargetDictionaryTargetType's TargetDictionaries accordingly.
// Replaces o.R.TargetDictionaries with related.
// Sets related.R.TargetDictionaryTargetType's TargetDictionaries accordingly.
func (o *TargetType) SetTargetDictionaries(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*TargetDictionary) error {
	// Null out the FK on every currently related row, then re-add the new set.
	query := "update \"target_dictionary\" set \"target_type\" = null where \"target_type\" = ?"
	values := []interface{}{o.TargetType}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	if o.R != nil {
		for _, rel := range o.R.TargetDictionaries {
			queries.SetScanner(&rel.TargetType, nil)
			if rel.R == nil {
				continue
			}

			rel.R.TargetDictionaryTargetType = nil
		}
		o.R.TargetDictionaries = nil
	}

	return o.AddTargetDictionaries(ctx, exec, insert, related...)
}

// RemoveTargetDictionaries relationships from objects passed in.
// Removes related items from R.TargetDictionaries (uses pointer comparison, removal does not keep order)
// Sets related.R.TargetDictionaryTargetType.
+func (o *TargetType) RemoveTargetDictionaries(ctx context.Context, exec boil.ContextExecutor, related ...*TargetDictionary) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.TargetType, nil) + if rel.R != nil { + rel.R.TargetDictionaryTargetType = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("target_type")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.TargetDictionaries { + if rel != ri { + continue + } + + ln := len(o.R.TargetDictionaries) + if ln > 1 && i < ln-1 { + o.R.TargetDictionaries[i] = o.R.TargetDictionaries[ln-1] + } + o.R.TargetDictionaries = o.R.TargetDictionaries[:ln-1] + break + } + } + + return nil +} + +// TargetTypes retrieves all the records using an executor. +func TargetTypes(mods ...qm.QueryMod) targetTypeQuery { + mods = append(mods, qm.From("\"target_type\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"target_type\".*"}) + } + + return targetTypeQuery{q} +} + +// FindTargetType retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindTargetType(ctx context.Context, exec boil.ContextExecutor, targetType string, selectCols ...string) (*TargetType, error) { + targetTypeObj := &TargetType{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"target_type\" where \"target_type\"=?", sel, + ) + + q := queries.Raw(query, targetType) + + err := q.Bind(ctx, exec, targetTypeObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from target_type") + } + + if err = targetTypeObj.doAfterSelectHooks(ctx, exec); err != nil { + return targetTypeObj, err + } + + return targetTypeObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *TargetType) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no target_type provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(targetTypeColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + targetTypeInsertCacheMut.RLock() + cache, cached := targetTypeInsertCache[key] + targetTypeInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + targetTypeAllColumns, + targetTypeColumnsWithDefault, + targetTypeColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(targetTypeType, targetTypeMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(targetTypeType, targetTypeMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"target_type\" (\"%s\") %%sVALUES (%s)%%s", 
strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"target_type\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into target_type") + } + + if !cached { + targetTypeInsertCacheMut.Lock() + targetTypeInsertCache[key] = cache + targetTypeInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the TargetType. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *TargetType) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + targetTypeUpdateCacheMut.RLock() + cache, cached := targetTypeUpdateCache[key] + targetTypeUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + targetTypeAllColumns, + targetTypePrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update target_type, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"target_type\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, targetTypePrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(targetTypeType, targetTypeMapping, append(wl, targetTypePrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update target_type row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for target_type") + } + + if !cached { + targetTypeUpdateCacheMut.Lock() + targetTypeUpdateCache[key] = cache + targetTypeUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q targetTypeQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for target_type") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for target_type") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o TargetTypeSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), targetTypePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"target_type\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, targetTypePrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in targetType slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all targetType") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *TargetType) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no target_type provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(targetTypeColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + targetTypeUpsertCacheMut.RLock() + cache, cached := targetTypeUpsertCache[key] + targetTypeUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + targetTypeAllColumns, + targetTypeColumnsWithDefault, + targetTypeColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + targetTypeAllColumns, + targetTypePrimaryKeyColumns, + ) + + if updateOnConflict 
&& len(update) == 0 { + return errors.New("models: unable to upsert target_type, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(targetTypePrimaryKeyColumns)) + copy(conflict, targetTypePrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"target_type\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(targetTypeType, targetTypeMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(targetTypeType, targetTypeMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert target_type") + } + + if !cached { + targetTypeUpsertCacheMut.Lock() + targetTypeUpsertCache[key] = cache + targetTypeUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single TargetType record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *TargetType) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no TargetType provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), targetTypePrimaryKeyMapping) + sql := "DELETE FROM \"target_type\" WHERE \"target_type\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from target_type") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for target_type") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q targetTypeQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no targetTypeQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from target_type") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for target_type") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o TargetTypeSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(targetTypeBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), targetTypePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"target_type\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, targetTypePrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from targetType slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for target_type") + } + + if len(targetTypeAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *TargetType) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindTargetType(ctx, exec, o.TargetType) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *TargetTypeSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := TargetTypeSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), targetTypePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"target_type\".* FROM \"target_type\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, targetTypePrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in TargetTypeSlice") + } + + *o = slice + + return nil +} + +// TargetTypeExists checks if the TargetType row exists. +func TargetTypeExists(ctx context.Context, exec boil.ContextExecutor, targetType string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"target_type\" where \"target_type\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, targetType) + } + row := exec.QueryRowContext(ctx, sql, targetType) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if target_type exists") + } + + return exists, nil +} diff --git a/models/tissue_dictionary.go b/models/tissue_dictionary.go new file mode 100644 index 0000000..8f4c021 --- /dev/null +++ b/models/tissue_dictionary.go @@ -0,0 +1,1347 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// TissueDictionary is an object representing the database table. +type TissueDictionary struct { + TissueID int64 `boil:"tissue_id" json:"tissue_id" toml:"tissue_id" yaml:"tissue_id"` + UberonID null.String `boil:"uberon_id" json:"uberon_id,omitempty" toml:"uberon_id" yaml:"uberon_id,omitempty"` + PrefName string `boil:"pref_name" json:"pref_name" toml:"pref_name" yaml:"pref_name"` + EfoID null.String `boil:"efo_id" json:"efo_id,omitempty" toml:"efo_id" yaml:"efo_id,omitempty"` + ChemblID string `boil:"chembl_id" json:"chembl_id" toml:"chembl_id" yaml:"chembl_id"` + BtoID null.String `boil:"bto_id" json:"bto_id,omitempty" toml:"bto_id" yaml:"bto_id,omitempty"` + CalohaID null.String `boil:"caloha_id" json:"caloha_id,omitempty" toml:"caloha_id" yaml:"caloha_id,omitempty"` + + R *tissueDictionaryR `boil:"-" json:"-" toml:"-" yaml:"-"` + L tissueDictionaryL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var TissueDictionaryColumns = struct { + TissueID string + UberonID string + PrefName string + EfoID string + ChemblID string + BtoID string + CalohaID string +}{ + TissueID: "tissue_id", + UberonID: "uberon_id", + PrefName: "pref_name", + EfoID: "efo_id", + ChemblID: "chembl_id", + BtoID: "bto_id", + CalohaID: "caloha_id", +} + +var TissueDictionaryTableColumns = struct { + TissueID string + UberonID string + PrefName string + EfoID string + ChemblID string + BtoID string + CalohaID string +}{ + TissueID: "tissue_dictionary.tissue_id", + UberonID: "tissue_dictionary.uberon_id", + PrefName: "tissue_dictionary.pref_name", + EfoID: 
"tissue_dictionary.efo_id", + ChemblID: "tissue_dictionary.chembl_id", + BtoID: "tissue_dictionary.bto_id", + CalohaID: "tissue_dictionary.caloha_id", +} + +// Generated where + +var TissueDictionaryWhere = struct { + TissueID whereHelperint64 + UberonID whereHelpernull_String + PrefName whereHelperstring + EfoID whereHelpernull_String + ChemblID whereHelperstring + BtoID whereHelpernull_String + CalohaID whereHelpernull_String +}{ + TissueID: whereHelperint64{field: "\"tissue_dictionary\".\"tissue_id\""}, + UberonID: whereHelpernull_String{field: "\"tissue_dictionary\".\"uberon_id\""}, + PrefName: whereHelperstring{field: "\"tissue_dictionary\".\"pref_name\""}, + EfoID: whereHelpernull_String{field: "\"tissue_dictionary\".\"efo_id\""}, + ChemblID: whereHelperstring{field: "\"tissue_dictionary\".\"chembl_id\""}, + BtoID: whereHelpernull_String{field: "\"tissue_dictionary\".\"bto_id\""}, + CalohaID: whereHelpernull_String{field: "\"tissue_dictionary\".\"caloha_id\""}, +} + +// TissueDictionaryRels is where relationship names are stored. +var TissueDictionaryRels = struct { + Chembl string + TissueAssays string +}{ + Chembl: "Chembl", + TissueAssays: "TissueAssays", +} + +// tissueDictionaryR is where relationships are stored. +type tissueDictionaryR struct { + Chembl *ChemblIDLookup `boil:"Chembl" json:"Chembl" toml:"Chembl" yaml:"Chembl"` + TissueAssays AssaySlice `boil:"TissueAssays" json:"TissueAssays" toml:"TissueAssays" yaml:"TissueAssays"` +} + +// NewStruct creates a new relationship struct +func (*tissueDictionaryR) NewStruct() *tissueDictionaryR { + return &tissueDictionaryR{} +} + +func (r *tissueDictionaryR) GetChembl() *ChemblIDLookup { + if r == nil { + return nil + } + return r.Chembl +} + +func (r *tissueDictionaryR) GetTissueAssays() AssaySlice { + if r == nil { + return nil + } + return r.TissueAssays +} + +// tissueDictionaryL is where Load methods for each relationship are stored. 
+type tissueDictionaryL struct{} + +var ( + tissueDictionaryAllColumns = []string{"tissue_id", "uberon_id", "pref_name", "efo_id", "chembl_id", "bto_id", "caloha_id"} + tissueDictionaryColumnsWithoutDefault = []string{"tissue_id", "pref_name", "chembl_id"} + tissueDictionaryColumnsWithDefault = []string{"uberon_id", "efo_id", "bto_id", "caloha_id"} + tissueDictionaryPrimaryKeyColumns = []string{"tissue_id"} + tissueDictionaryGeneratedColumns = []string{} +) + +type ( + // TissueDictionarySlice is an alias for a slice of pointers to TissueDictionary. + // This should almost always be used instead of []TissueDictionary. + TissueDictionarySlice []*TissueDictionary + // TissueDictionaryHook is the signature for custom TissueDictionary hook methods + TissueDictionaryHook func(context.Context, boil.ContextExecutor, *TissueDictionary) error + + tissueDictionaryQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + tissueDictionaryType = reflect.TypeOf(&TissueDictionary{}) + tissueDictionaryMapping = queries.MakeStructMapping(tissueDictionaryType) + tissueDictionaryPrimaryKeyMapping, _ = queries.BindMapping(tissueDictionaryType, tissueDictionaryMapping, tissueDictionaryPrimaryKeyColumns) + tissueDictionaryInsertCacheMut sync.RWMutex + tissueDictionaryInsertCache = make(map[string]insertCache) + tissueDictionaryUpdateCacheMut sync.RWMutex + tissueDictionaryUpdateCache = make(map[string]updateCache) + tissueDictionaryUpsertCacheMut sync.RWMutex + tissueDictionaryUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var tissueDictionaryAfterSelectHooks []TissueDictionaryHook + +var tissueDictionaryBeforeInsertHooks []TissueDictionaryHook +var tissueDictionaryAfterInsertHooks []TissueDictionaryHook + +var tissueDictionaryBeforeUpdateHooks []TissueDictionaryHook +var tissueDictionaryAfterUpdateHooks []TissueDictionaryHook + +var tissueDictionaryBeforeDeleteHooks []TissueDictionaryHook +var tissueDictionaryAfterDeleteHooks []TissueDictionaryHook + +var tissueDictionaryBeforeUpsertHooks []TissueDictionaryHook +var tissueDictionaryAfterUpsertHooks []TissueDictionaryHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *TissueDictionary) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range tissueDictionaryAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *TissueDictionary) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range tissueDictionaryBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *TissueDictionary) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range tissueDictionaryAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *TissueDictionary) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range tissueDictionaryBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *TissueDictionary) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range tissueDictionaryAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *TissueDictionary) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range tissueDictionaryBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *TissueDictionary) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range tissueDictionaryAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *TissueDictionary) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range tissueDictionaryBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *TissueDictionary) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range tissueDictionaryAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddTissueDictionaryHook registers your hook function for all future operations. +func AddTissueDictionaryHook(hookPoint boil.HookPoint, tissueDictionaryHook TissueDictionaryHook) { + switch hookPoint { + case boil.AfterSelectHook: + tissueDictionaryAfterSelectHooks = append(tissueDictionaryAfterSelectHooks, tissueDictionaryHook) + case boil.BeforeInsertHook: + tissueDictionaryBeforeInsertHooks = append(tissueDictionaryBeforeInsertHooks, tissueDictionaryHook) + case boil.AfterInsertHook: + tissueDictionaryAfterInsertHooks = append(tissueDictionaryAfterInsertHooks, tissueDictionaryHook) + case boil.BeforeUpdateHook: + tissueDictionaryBeforeUpdateHooks = append(tissueDictionaryBeforeUpdateHooks, tissueDictionaryHook) + case boil.AfterUpdateHook: + tissueDictionaryAfterUpdateHooks = append(tissueDictionaryAfterUpdateHooks, tissueDictionaryHook) + case boil.BeforeDeleteHook: + tissueDictionaryBeforeDeleteHooks = append(tissueDictionaryBeforeDeleteHooks, tissueDictionaryHook) + case boil.AfterDeleteHook: + tissueDictionaryAfterDeleteHooks = append(tissueDictionaryAfterDeleteHooks, tissueDictionaryHook) + case boil.BeforeUpsertHook: + tissueDictionaryBeforeUpsertHooks = append(tissueDictionaryBeforeUpsertHooks, tissueDictionaryHook) + case boil.AfterUpsertHook: + tissueDictionaryAfterUpsertHooks = append(tissueDictionaryAfterUpsertHooks, tissueDictionaryHook) + } +} + +// One returns a single tissueDictionary record from the query. 
+func (q tissueDictionaryQuery) One(ctx context.Context, exec boil.ContextExecutor) (*TissueDictionary, error) { + o := &TissueDictionary{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for tissue_dictionary") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all TissueDictionary records from the query. +func (q tissueDictionaryQuery) All(ctx context.Context, exec boil.ContextExecutor) (TissueDictionarySlice, error) { + var o []*TissueDictionary + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to TissueDictionary slice") + } + + if len(tissueDictionaryAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all TissueDictionary records in the query. +func (q tissueDictionaryQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count tissue_dictionary rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q tissueDictionaryQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if tissue_dictionary exists") + } + + return count > 0, nil +} + +// Chembl pointed to by the foreign key. 
+func (o *TissueDictionary) Chembl(mods ...qm.QueryMod) chemblIDLookupQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"chembl_id\" = ?", o.ChemblID), + } + + queryMods = append(queryMods, mods...) + + return ChemblIDLookups(queryMods...) +} + +// TissueAssays retrieves all the assay's Assays with an executor via tissue_id column. +func (o *TissueDictionary) TissueAssays(mods ...qm.QueryMod) assayQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"assays\".\"tissue_id\"=?", o.TissueID), + ) + + return Assays(queryMods...) +} + +// LoadChembl allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (tissueDictionaryL) LoadChembl(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTissueDictionary interface{}, mods queries.Applicator) error { + var slice []*TissueDictionary + var object *TissueDictionary + + if singular { + object = maybeTissueDictionary.(*TissueDictionary) + } else { + slice = *maybeTissueDictionary.(*[]*TissueDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &tissueDictionaryR{} + } + args = append(args, object.ChemblID) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &tissueDictionaryR{} + } + + for _, a := range args { + if a == obj.ChemblID { + continue Outer + } + } + + args = append(args, obj.ChemblID) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`chembl_id_lookup`), + qm.WhereIn(`chembl_id_lookup.chembl_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load ChemblIDLookup") + } + + var resultSlice []*ChemblIDLookup + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to 
bind eager loaded slice ChemblIDLookup") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for chembl_id_lookup") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for chembl_id_lookup") + } + + if len(tissueDictionaryAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Chembl = foreign + if foreign.R == nil { + foreign.R = &chemblIDLookupR{} + } + foreign.R.ChemblTissueDictionary = object + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ChemblID == foreign.ChemblID { + local.R.Chembl = foreign + if foreign.R == nil { + foreign.R = &chemblIDLookupR{} + } + foreign.R.ChemblTissueDictionary = local + break + } + } + } + + return nil +} + +// LoadTissueAssays allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (tissueDictionaryL) LoadTissueAssays(ctx context.Context, e boil.ContextExecutor, singular bool, maybeTissueDictionary interface{}, mods queries.Applicator) error { + var slice []*TissueDictionary + var object *TissueDictionary + + if singular { + object = maybeTissueDictionary.(*TissueDictionary) + } else { + slice = *maybeTissueDictionary.(*[]*TissueDictionary) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &tissueDictionaryR{} + } + args = append(args, object.TissueID) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &tissueDictionaryR{} + } + + for _, a := range args { + if queries.Equal(a, obj.TissueID) { + continue Outer + } + } + + args = append(args, obj.TissueID) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`assays`), + qm.WhereIn(`assays.tissue_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load assays") + } + + var resultSlice []*Assay + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice assays") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on assays") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays") + } + + if len(assayAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.TissueAssays = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.Tissue = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if queries.Equal(local.TissueID, foreign.TissueID) { + 
local.R.TissueAssays = append(local.R.TissueAssays, foreign) + if foreign.R == nil { + foreign.R = &assayR{} + } + foreign.R.Tissue = local + break + } + } + } + + return nil +} + +// SetChembl of the tissueDictionary to the related item. +// Sets o.R.Chembl to related. +// Adds o to related.R.ChemblTissueDictionary. +func (o *TissueDictionary) SetChembl(ctx context.Context, exec boil.ContextExecutor, insert bool, related *ChemblIDLookup) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"tissue_dictionary\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"chembl_id"}), + strmangle.WhereClause("\"", "\"", 0, tissueDictionaryPrimaryKeyColumns), + ) + values := []interface{}{related.ChemblID, o.TissueID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.ChemblID = related.ChemblID + if o.R == nil { + o.R = &tissueDictionaryR{ + Chembl: related, + } + } else { + o.R.Chembl = related + } + + if related.R == nil { + related.R = &chemblIDLookupR{ + ChemblTissueDictionary: o, + } + } else { + related.R.ChemblTissueDictionary = o + } + + return nil +} + +// AddTissueAssays adds the given related objects to the existing relationships +// of the tissue_dictionary, optionally inserting them as new records. +// Appends related to o.R.TissueAssays. +// Sets related.R.Tissue appropriately. 
+func (o *TissueDictionary) AddTissueAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error { + var err error + for _, rel := range related { + if insert { + queries.Assign(&rel.TissueID, o.TissueID) + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"assays\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"tissue_id"}), + strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns), + ) + values := []interface{}{o.TissueID, rel.AssayID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + queries.Assign(&rel.TissueID, o.TissueID) + } + } + + if o.R == nil { + o.R = &tissueDictionaryR{ + TissueAssays: related, + } + } else { + o.R.TissueAssays = append(o.R.TissueAssays, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &assayR{ + Tissue: o, + } + } else { + rel.R.Tissue = o + } + } + return nil +} + +// SetTissueAssays removes all previously related items of the +// tissue_dictionary replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.Tissue's TissueAssays accordingly. +// Replaces o.R.TissueAssays with related. +// Sets related.R.Tissue's TissueAssays accordingly. +func (o *TissueDictionary) SetTissueAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error { + query := "update \"assays\" set \"tissue_id\" = null where \"tissue_id\" = ?" 
+ values := []interface{}{o.TissueID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + if o.R != nil { + for _, rel := range o.R.TissueAssays { + queries.SetScanner(&rel.TissueID, nil) + if rel.R == nil { + continue + } + + rel.R.Tissue = nil + } + o.R.TissueAssays = nil + } + + return o.AddTissueAssays(ctx, exec, insert, related...) +} + +// RemoveTissueAssays relationships from objects passed in. +// Removes related items from R.TissueAssays (uses pointer comparison, removal does not keep order) +// Sets related.R.Tissue. +func (o *TissueDictionary) RemoveTissueAssays(ctx context.Context, exec boil.ContextExecutor, related ...*Assay) error { + if len(related) == 0 { + return nil + } + + var err error + for _, rel := range related { + queries.SetScanner(&rel.TissueID, nil) + if rel.R != nil { + rel.R.Tissue = nil + } + if _, err = rel.Update(ctx, exec, boil.Whitelist("tissue_id")); err != nil { + return err + } + } + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.TissueAssays { + if rel != ri { + continue + } + + ln := len(o.R.TissueAssays) + if ln > 1 && i < ln-1 { + o.R.TissueAssays[i] = o.R.TissueAssays[ln-1] + } + o.R.TissueAssays = o.R.TissueAssays[:ln-1] + break + } + } + + return nil +} + +// TissueDictionaries retrieves all the records using an executor. +func TissueDictionaries(mods ...qm.QueryMod) tissueDictionaryQuery { + mods = append(mods, qm.From("\"tissue_dictionary\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"tissue_dictionary\".*"}) + } + + return tissueDictionaryQuery{q} +} + +// FindTissueDictionary retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindTissueDictionary(ctx context.Context, exec boil.ContextExecutor, tissueID int64, selectCols ...string) (*TissueDictionary, error) { + tissueDictionaryObj := &TissueDictionary{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"tissue_dictionary\" where \"tissue_id\"=?", sel, + ) + + q := queries.Raw(query, tissueID) + + err := q.Bind(ctx, exec, tissueDictionaryObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from tissue_dictionary") + } + + if err = tissueDictionaryObj.doAfterSelectHooks(ctx, exec); err != nil { + return tissueDictionaryObj, err + } + + return tissueDictionaryObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *TissueDictionary) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no tissue_dictionary provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(tissueDictionaryColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + tissueDictionaryInsertCacheMut.RLock() + cache, cached := tissueDictionaryInsertCache[key] + tissueDictionaryInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + tissueDictionaryAllColumns, + tissueDictionaryColumnsWithDefault, + tissueDictionaryColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(tissueDictionaryType, tissueDictionaryMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(tissueDictionaryType, tissueDictionaryMapping, returnColumns) + if err != 
nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"tissue_dictionary\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"tissue_dictionary\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into tissue_dictionary") + } + + if !cached { + tissueDictionaryInsertCacheMut.Lock() + tissueDictionaryInsertCache[key] = cache + tissueDictionaryInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the TissueDictionary. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *TissueDictionary) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + tissueDictionaryUpdateCacheMut.RLock() + cache, cached := tissueDictionaryUpdateCache[key] + tissueDictionaryUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + tissueDictionaryAllColumns, + tissueDictionaryPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update tissue_dictionary, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"tissue_dictionary\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, tissueDictionaryPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(tissueDictionaryType, tissueDictionaryMapping, append(wl, tissueDictionaryPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update tissue_dictionary row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for tissue_dictionary") + } + + if !cached { + tissueDictionaryUpdateCacheMut.Lock() + tissueDictionaryUpdateCache[key] = cache + tissueDictionaryUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q tissueDictionaryQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for tissue_dictionary") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for tissue_dictionary") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o TissueDictionarySlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), tissueDictionaryPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"tissue_dictionary\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, tissueDictionaryPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in tissueDictionary slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all tissueDictionary") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *TissueDictionary) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no tissue_dictionary provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(tissueDictionaryColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + tissueDictionaryUpsertCacheMut.RLock() + cache, cached := tissueDictionaryUpsertCache[key] + tissueDictionaryUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + tissueDictionaryAllColumns, + tissueDictionaryColumnsWithDefault, + tissueDictionaryColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + 
tissueDictionaryAllColumns, + tissueDictionaryPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert tissue_dictionary, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(tissueDictionaryPrimaryKeyColumns)) + copy(conflict, tissueDictionaryPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"tissue_dictionary\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(tissueDictionaryType, tissueDictionaryMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(tissueDictionaryType, tissueDictionaryMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert tissue_dictionary") + } + + if !cached { + tissueDictionaryUpsertCacheMut.Lock() + tissueDictionaryUpsertCache[key] = cache + tissueDictionaryUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single TissueDictionary record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *TissueDictionary) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no TissueDictionary provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), tissueDictionaryPrimaryKeyMapping) + sql := "DELETE FROM \"tissue_dictionary\" WHERE \"tissue_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from tissue_dictionary") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for tissue_dictionary") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q tissueDictionaryQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no tissueDictionaryQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from tissue_dictionary") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for tissue_dictionary") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o TissueDictionarySlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(tissueDictionaryBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), tissueDictionaryPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"tissue_dictionary\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, tissueDictionaryPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from tissueDictionary slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for tissue_dictionary") + } + + if len(tissueDictionaryAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *TissueDictionary) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindTissueDictionary(ctx, exec, o.TissueID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *TissueDictionarySlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := TissueDictionarySlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), tissueDictionaryPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"tissue_dictionary\".* FROM \"tissue_dictionary\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, tissueDictionaryPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in TissueDictionarySlice") + } + + *o = slice + + return nil +} + +// TissueDictionaryExists checks if the TissueDictionary row exists. +func TissueDictionaryExists(ctx context.Context, exec boil.ContextExecutor, tissueID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"tissue_dictionary\" where \"tissue_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, tissueID) + } + row := exec.QueryRowContext(ctx, sql, tissueID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if tissue_dictionary exists") + } + + return exists, nil +} diff --git a/models/uniprot.go b/models/uniprot.go new file mode 100644 index 0000000..90f3957 --- /dev/null +++ b/models/uniprot.go @@ -0,0 +1,1355 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Uniprot is an object representing the database table. +type Uniprot struct { + Accession null.String `boil:"accession" json:"accession,omitempty" toml:"accession" yaml:"accession,omitempty"` + Database string `boil:"database" json:"database" toml:"database" yaml:"database"` + Seqhash string `boil:"seqhash" json:"seqhash" toml:"seqhash" yaml:"seqhash"` + + R *uniprotR `boil:"-" json:"-" toml:"-" yaml:"-"` + L uniprotL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var UniprotColumns = struct { + Accession string + Database string + Seqhash string +}{ + Accession: "accession", + Database: "database", + Seqhash: "seqhash", +} + +var UniprotTableColumns = struct { + Accession string + Database string + Seqhash string +}{ + Accession: "uniprot.accession", + Database: "uniprot.database", + Seqhash: "uniprot.seqhash", +} + +// Generated where + +var UniprotWhere = struct { + Accession whereHelpernull_String + Database whereHelperstring + Seqhash whereHelperstring +}{ + Accession: whereHelpernull_String{field: "\"uniprot\".\"accession\""}, + Database: whereHelperstring{field: "\"uniprot\".\"database\""}, + Seqhash: whereHelperstring{field: "\"uniprot\".\"seqhash\""}, +} + +// UniprotRels is where relationship names are stored. +var UniprotRels = struct { + UniprotSeqhash string + Reactions string +}{ + UniprotSeqhash: "UniprotSeqhash", + Reactions: "Reactions", +} + +// uniprotR is where relationships are stored. 
+type uniprotR struct { + UniprotSeqhash *Seqhash `boil:"UniprotSeqhash" json:"UniprotSeqhash" toml:"UniprotSeqhash" yaml:"UniprotSeqhash"` + Reactions ReactionSlice `boil:"Reactions" json:"Reactions" toml:"Reactions" yaml:"Reactions"` +} + +// NewStruct creates a new relationship struct +func (*uniprotR) NewStruct() *uniprotR { + return &uniprotR{} +} + +func (r *uniprotR) GetUniprotSeqhash() *Seqhash { + if r == nil { + return nil + } + return r.UniprotSeqhash +} + +func (r *uniprotR) GetReactions() ReactionSlice { + if r == nil { + return nil + } + return r.Reactions +} + +// uniprotL is where Load methods for each relationship are stored. +type uniprotL struct{} + +var ( + uniprotAllColumns = []string{"accession", "database", "seqhash"} + uniprotColumnsWithoutDefault = []string{"database", "seqhash"} + uniprotColumnsWithDefault = []string{"accession"} + uniprotPrimaryKeyColumns = []string{"accession"} + uniprotGeneratedColumns = []string{} +) + +type ( + // UniprotSlice is an alias for a slice of pointers to Uniprot. + // This should almost always be used instead of []Uniprot. + UniprotSlice []*Uniprot + // UniprotHook is the signature for custom Uniprot hook methods + UniprotHook func(context.Context, boil.ContextExecutor, *Uniprot) error + + uniprotQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + uniprotType = reflect.TypeOf(&Uniprot{}) + uniprotMapping = queries.MakeStructMapping(uniprotType) + uniprotPrimaryKeyMapping, _ = queries.BindMapping(uniprotType, uniprotMapping, uniprotPrimaryKeyColumns) + uniprotInsertCacheMut sync.RWMutex + uniprotInsertCache = make(map[string]insertCache) + uniprotUpdateCacheMut sync.RWMutex + uniprotUpdateCache = make(map[string]updateCache) + uniprotUpsertCacheMut sync.RWMutex + uniprotUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var uniprotAfterSelectHooks []UniprotHook + +var uniprotBeforeInsertHooks []UniprotHook +var uniprotAfterInsertHooks []UniprotHook + +var uniprotBeforeUpdateHooks []UniprotHook +var uniprotAfterUpdateHooks []UniprotHook + +var uniprotBeforeDeleteHooks []UniprotHook +var uniprotAfterDeleteHooks []UniprotHook + +var uniprotBeforeUpsertHooks []UniprotHook +var uniprotAfterUpsertHooks []UniprotHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *Uniprot) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range uniprotAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *Uniprot) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range uniprotBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Uniprot) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range uniprotAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *Uniprot) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range uniprotBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. 
+func (o *Uniprot) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range uniprotAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Uniprot) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range uniprotBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *Uniprot) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range uniprotAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Uniprot) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range uniprotBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *Uniprot) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range uniprotAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddUniprotHook registers your hook function for all future operations. 
func AddUniprotHook(hookPoint boil.HookPoint, uniprotHook UniprotHook) {
	// NOTE(review): hooks are appended to package-level slices with no mutex
	// (unlike the statement caches below, which use sync.RWMutex). Register
	// hooks during program init, before concurrent use — TODO confirm callers.
	switch hookPoint {
	case boil.AfterSelectHook:
		uniprotAfterSelectHooks = append(uniprotAfterSelectHooks, uniprotHook)
	case boil.BeforeInsertHook:
		uniprotBeforeInsertHooks = append(uniprotBeforeInsertHooks, uniprotHook)
	case boil.AfterInsertHook:
		uniprotAfterInsertHooks = append(uniprotAfterInsertHooks, uniprotHook)
	case boil.BeforeUpdateHook:
		uniprotBeforeUpdateHooks = append(uniprotBeforeUpdateHooks, uniprotHook)
	case boil.AfterUpdateHook:
		uniprotAfterUpdateHooks = append(uniprotAfterUpdateHooks, uniprotHook)
	case boil.BeforeDeleteHook:
		uniprotBeforeDeleteHooks = append(uniprotBeforeDeleteHooks, uniprotHook)
	case boil.AfterDeleteHook:
		uniprotAfterDeleteHooks = append(uniprotAfterDeleteHooks, uniprotHook)
	case boil.BeforeUpsertHook:
		uniprotBeforeUpsertHooks = append(uniprotBeforeUpsertHooks, uniprotHook)
	case boil.AfterUpsertHook:
		uniprotAfterUpsertHooks = append(uniprotAfterUpsertHooks, uniprotHook)
	}
}

// One returns a single uniprot record from the query.
// sql.ErrNoRows is returned unwrapped so callers can test it with errors.Is;
// all other errors are wrapped with context.
func (q uniprotQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Uniprot, error) {
	o := &Uniprot{}

	// Cap the query at one row before binding.
	queries.SetLimit(q.Query, 1)

	err := q.Bind(ctx, exec, o)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			// Pass the sentinel through untouched for errors.Is at call sites.
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "models: failed to execute a one query for uniprot")
	}

	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
		return o, err
	}

	return o, nil
}

// All returns all Uniprot records from the query.
+func (q uniprotQuery) All(ctx context.Context, exec boil.ContextExecutor) (UniprotSlice, error) { + var o []*Uniprot + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to Uniprot slice") + } + + if len(uniprotAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Uniprot records in the query. +func (q uniprotQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count uniprot rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q uniprotQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if uniprot exists") + } + + return count > 0, nil +} + +// UniprotSeqhash pointed to by the foreign key. +func (o *Uniprot) UniprotSeqhash(mods ...qm.QueryMod) seqhashQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"seqhash\" = ?", o.Seqhash), + } + + queryMods = append(queryMods, mods...) + + return Seqhashes(queryMods...) +} + +// Reactions retrieves all the reaction's Reactions with an executor. +func (o *Uniprot) Reactions(mods ...qm.QueryMod) reactionQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) 
+ } + + queryMods = append(queryMods, + qm.InnerJoin("\"uniprot_to_reaction\" on \"reaction\".\"accession\" = \"uniprot_to_reaction\".\"reaction\""), + qm.Where("\"uniprot_to_reaction\".\"uniprot\"=?", o.Accession), + ) + + return Reactions(queryMods...) +} + +// LoadUniprotSeqhash allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (uniprotL) LoadUniprotSeqhash(ctx context.Context, e boil.ContextExecutor, singular bool, maybeUniprot interface{}, mods queries.Applicator) error { + var slice []*Uniprot + var object *Uniprot + + if singular { + object = maybeUniprot.(*Uniprot) + } else { + slice = *maybeUniprot.(*[]*Uniprot) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &uniprotR{} + } + args = append(args, object.Seqhash) + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &uniprotR{} + } + + for _, a := range args { + if a == obj.Seqhash { + continue Outer + } + } + + args = append(args, obj.Seqhash) + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`seqhash`), + qm.WhereIn(`seqhash.seqhash in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Seqhash") + } + + var resultSlice []*Seqhash + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Seqhash") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for seqhash") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for seqhash") + } + + if len(uniprotAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if 
len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.UniprotSeqhash = foreign + if foreign.R == nil { + foreign.R = &seqhashR{} + } + foreign.R.Uniprots = append(foreign.R.Uniprots, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.Seqhash == foreign.Seqhash { + local.R.UniprotSeqhash = foreign + if foreign.R == nil { + foreign.R = &seqhashR{} + } + foreign.R.Uniprots = append(foreign.R.Uniprots, local) + break + } + } + } + + return nil +} + +// LoadReactions allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (uniprotL) LoadReactions(ctx context.Context, e boil.ContextExecutor, singular bool, maybeUniprot interface{}, mods queries.Applicator) error { + var slice []*Uniprot + var object *Uniprot + + if singular { + object = maybeUniprot.(*Uniprot) + } else { + slice = *maybeUniprot.(*[]*Uniprot) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &uniprotR{} + } + args = append(args, object.Accession) + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &uniprotR{} + } + + for _, a := range args { + if queries.Equal(a, obj.Accession) { + continue Outer + } + } + + args = append(args, obj.Accession) + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.Select("\"reaction\".\"id\", \"reaction\".\"directional\", \"reaction\".\"accession\", \"reaction\".\"status\", \"reaction\".\"comment\", \"reaction\".\"equation\", \"reaction\".\"html_equation\", \"reaction\".\"is_chemically_balanced\", \"reaction\".\"is_transport\", \"reaction\".\"ec\", \"reaction\".\"location\", \"a\".\"uniprot\""), + qm.From("\"reaction\""), + qm.InnerJoin("\"uniprot_to_reaction\" as \"a\" on \"reaction\".\"accession\" = \"a\".\"reaction\""), + qm.WhereIn("\"a\".\"uniprot\" in ?", args...), + ) + if mods != nil { + 
mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load reaction") + } + + var resultSlice []*Reaction + + var localJoinCols []null.String + for results.Next() { + one := new(Reaction) + var localJoinCol null.String + + err = results.Scan(&one.ID, &one.Directional, &one.Accession, &one.Status, &one.Comment, &one.Equation, &one.HTMLEquation, &one.IsChemicallyBalanced, &one.IsTransport, &one.Ec, &one.Location, &localJoinCol) + if err != nil { + return errors.Wrap(err, "failed to scan eager loaded results for reaction") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "failed to plebian-bind eager loaded slice reaction") + } + + resultSlice = append(resultSlice, one) + localJoinCols = append(localJoinCols, localJoinCol) + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on reaction") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for reaction") + } + + if len(reactionAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.Reactions = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &reactionR{} + } + foreign.R.Uniprots = append(foreign.R.Uniprots, object) + } + return nil + } + + for i, foreign := range resultSlice { + localJoinCol := localJoinCols[i] + for _, local := range slice { + if queries.Equal(local.Accession, localJoinCol) { + local.R.Reactions = append(local.R.Reactions, foreign) + if foreign.R == nil { + foreign.R = &reactionR{} + } + foreign.R.Uniprots = append(foreign.R.Uniprots, local) + break + } + } + } + + return nil +} + +// SetUniprotSeqhash of the uniprot to the related item. +// Sets o.R.UniprotSeqhash to related. +// Adds o to related.R.Uniprots. 
func (o *Uniprot) SetUniprotSeqhash(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Seqhash) error {
	var err error
	// Optionally persist the foreign row first so the FK value exists.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Point this row's seqhash column at the related row, matched by primary key.
	updateQuery := fmt.Sprintf(
		"UPDATE \"uniprot\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, []string{"seqhash"}),
		strmangle.WhereClause("\"", "\"", 0, uniprotPrimaryKeyColumns),
	)
	values := []interface{}{related.Seqhash, o.Accession}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the database change in the in-memory struct and both sides of
	// the eager-loaded relationship caches.
	o.Seqhash = related.Seqhash
	if o.R == nil {
		o.R = &uniprotR{
			UniprotSeqhash: related,
		}
	} else {
		o.R.UniprotSeqhash = related
	}

	if related.R == nil {
		related.R = &seqhashR{
			Uniprots: UniprotSlice{o},
		}
	} else {
		related.R.Uniprots = append(related.R.Uniprots, o)
	}

	return nil
}

// AddReactions adds the given related objects to the existing relationships
// of the uniprot, optionally inserting them as new records.
// Appends related to o.R.Reactions.
// Sets related.R.Uniprots appropriately.
+func (o *Uniprot) AddReactions(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Reaction) error { + var err error + for _, rel := range related { + if insert { + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + } + + for _, rel := range related { + query := "insert into \"uniprot_to_reaction\" (\"uniprot\", \"reaction\") values (?, ?)" + values := []interface{}{o.Accession, rel.Accession} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err = exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to insert into join table") + } + } + if o.R == nil { + o.R = &uniprotR{ + Reactions: related, + } + } else { + o.R.Reactions = append(o.R.Reactions, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &reactionR{ + Uniprots: UniprotSlice{o}, + } + } else { + rel.R.Uniprots = append(rel.R.Uniprots, o) + } + } + return nil +} + +// SetReactions removes all previously related items of the +// uniprot replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.Uniprots's Reactions accordingly. +// Replaces o.R.Reactions with related. +// Sets related.R.Uniprots's Reactions accordingly. +func (o *Uniprot) SetReactions(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Reaction) error { + query := "delete from \"uniprot_to_reaction\" where \"uniprot\" = ?" + values := []interface{}{o.Accession} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) 
+ if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + removeReactionsFromUniprotsSlice(o, related) + if o.R != nil { + o.R.Reactions = nil + } + + return o.AddReactions(ctx, exec, insert, related...) +} + +// RemoveReactions relationships from objects passed in. +// Removes related items from R.Reactions (uses pointer comparison, removal does not keep order) +// Sets related.R.Uniprots. +func (o *Uniprot) RemoveReactions(ctx context.Context, exec boil.ContextExecutor, related ...*Reaction) error { + if len(related) == 0 { + return nil + } + + var err error + query := fmt.Sprintf( + "delete from \"uniprot_to_reaction\" where \"uniprot\" = ? and \"reaction\" in (%s)", + strmangle.Placeholders(dialect.UseIndexPlaceholders, len(related), 2, 1), + ) + values := []interface{}{o.Accession} + for _, rel := range related { + values = append(values, rel.Accession) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err = exec.ExecContext(ctx, query, values...) 
+ if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + removeReactionsFromUniprotsSlice(o, related) + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.Reactions { + if rel != ri { + continue + } + + ln := len(o.R.Reactions) + if ln > 1 && i < ln-1 { + o.R.Reactions[i] = o.R.Reactions[ln-1] + } + o.R.Reactions = o.R.Reactions[:ln-1] + break + } + } + + return nil +} + +func removeReactionsFromUniprotsSlice(o *Uniprot, related []*Reaction) { + for _, rel := range related { + if rel.R == nil { + continue + } + for i, ri := range rel.R.Uniprots { + if !queries.Equal(o.Accession, ri.Accession) { + continue + } + + ln := len(rel.R.Uniprots) + if ln > 1 && i < ln-1 { + rel.R.Uniprots[i] = rel.R.Uniprots[ln-1] + } + rel.R.Uniprots = rel.R.Uniprots[:ln-1] + break + } + } +} + +// Uniprots retrieves all the records using an executor. +func Uniprots(mods ...qm.QueryMod) uniprotQuery { + mods = append(mods, qm.From("\"uniprot\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"uniprot\".*"}) + } + + return uniprotQuery{q} +} + +// FindUniprot retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindUniprot(ctx context.Context, exec boil.ContextExecutor, accession null.String, selectCols ...string) (*Uniprot, error) { + uniprotObj := &Uniprot{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"uniprot\" where \"accession\"=?", sel, + ) + + q := queries.Raw(query, accession) + + err := q.Bind(ctx, exec, uniprotObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from uniprot") + } + + if err = uniprotObj.doAfterSelectHooks(ctx, exec); err != nil { + return uniprotObj, err + } + + return uniprotObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *Uniprot) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no uniprot provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(uniprotColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + uniprotInsertCacheMut.RLock() + cache, cached := uniprotInsertCache[key] + uniprotInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + uniprotAllColumns, + uniprotColumnsWithDefault, + uniprotColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(uniprotType, uniprotMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(uniprotType, uniprotMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"uniprot\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), 
strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"uniprot\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into uniprot") + } + + if !cached { + uniprotInsertCacheMut.Lock() + uniprotInsertCache[key] = cache + uniprotInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the Uniprot. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
func (o *Uniprot) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
	var err error
	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
		return 0, err
	}
	// Built statements are memoized per column set to avoid re-deriving
	// the SQL and struct mapping on every call.
	key := makeCacheKey(columns, nil)
	uniprotUpdateCacheMut.RLock()
	cache, cached := uniprotUpdateCache[key]
	uniprotUpdateCacheMut.RUnlock()

	if !cached {
		wl := columns.UpdateColumnSet(
			uniprotAllColumns,
			uniprotPrimaryKeyColumns,
		)

		// created_at is never auto-updated unless the caller explicitly
		// whitelists it.
		if !columns.IsWhitelist() {
			wl = strmangle.SetComplement(wl, []string{"created_at"})
		}
		if len(wl) == 0 {
			return 0, errors.New("models: unable to update uniprot, could not build whitelist")
		}

		cache.query = fmt.Sprintf("UPDATE \"uniprot\" SET %s WHERE %s",
			strmangle.SetParamNames("\"", "\"", 0, wl),
			strmangle.WhereClause("\"", "\"", 0, uniprotPrimaryKeyColumns),
		)
		// Bind values for the SET list plus the primary-key WHERE args.
		cache.valueMapping, err = queries.BindMapping(uniprotType, uniprotMapping, append(wl, uniprotPrimaryKeyColumns...))
		if err != nil {
			return 0, err
		}
	}

	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, values)
	}
	var result sql.Result
	result, err = exec.ExecContext(ctx, cache.query, values...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update uniprot row")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by update for uniprot")
	}

	// Populate the cache only after a successful round trip.
	if !cached {
		uniprotUpdateCacheMut.Lock()
		uniprotUpdateCache[key] = cache
		uniprotUpdateCacheMut.Unlock()
	}

	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
}

// UpdateAll updates all rows with the specified column values.
func (q uniprotQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	queries.SetUpdate(q.Query, cols)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all for uniprot")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for uniprot")
	}

	return rowsAff, nil
}

// UpdateAll updates all rows with the specified column values, using an executor.
func (o UniprotSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
	ln := int64(len(o))
	if ln == 0 {
		return 0, nil
	}

	if len(cols) == 0 {
		return 0, errors.New("models: update all requires at least one column argument")
	}

	// Flatten the cols map into parallel name/value lists; map iteration
	// order is random but names and args stay index-aligned.
	colNames := make([]string, len(cols))
	args := make([]interface{}, len(cols))

	i := 0
	for name, value := range cols {
		colNames[i] = name
		args[i] = value
		i++
	}

	// Append all of the primary key values for each column
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), uniprotPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// One statement updates every row in the slice, matched by a repeated
	// primary-key WHERE clause.
	sql := fmt.Sprintf("UPDATE \"uniprot\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 0, colNames),
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, uniprotPrimaryKeyColumns, len(o)))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to update all in uniprot slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all uniprot")
	}
	return rowsAff, nil
}

// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
func (o *Uniprot) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
	if o == nil {
		return errors.New("models: no uniprot provided for upsert")
	}

	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
		return err
	}

	nzDefaults := queries.NonZeroDefaultSet(uniprotColumnsWithDefault, o)

	// Build cache key in-line uglily - mysql vs psql problems
	buf := strmangle.GetBuffer()
	if updateOnConflict {
		buf.WriteByte('t')
	} else {
		buf.WriteByte('f')
	}
	buf.WriteByte('.')
	for _, c := range conflictColumns {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(updateColumns.Kind))
	for _, c := range updateColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	buf.WriteString(strconv.Itoa(insertColumns.Kind))
	for _, c := range insertColumns.Cols {
		buf.WriteString(c)
	}
	buf.WriteByte('.')
	for _, c := range nzDefaults {
		buf.WriteString(c)
	}
	key := buf.String()
	strmangle.PutBuffer(buf)

	uniprotUpsertCacheMut.RLock()
	cache, cached := uniprotUpsertCache[key]
	uniprotUpsertCacheMut.RUnlock()

	var err error

	if !cached {
		insert, ret := insertColumns.InsertColumnSet(
			uniprotAllColumns,
			uniprotColumnsWithDefault,
			uniprotColumnsWithoutDefault,
			nzDefaults,
		)
		update := updateColumns.UpdateColumnSet(
			uniprotAllColumns,
			uniprotPrimaryKeyColumns,
		)

		if updateOnConflict && len(update) == 0 {
			return errors.New("models: unable to upsert uniprot, could not build update column list")
		}

		// Default the conflict target to the primary key when none given.
		conflict := conflictColumns
		if len(conflict) == 0 {
			conflict = make([]string, len(uniprotPrimaryKeyColumns))
			copy(conflict, uniprotPrimaryKeyColumns)
		}
		// SQLite dialect: ON CONFLICT ... DO UPDATE / DO NOTHING.
		cache.query = buildUpsertQuerySQLite(dialect, "\"uniprot\"", updateOnConflict, ret, update, conflict, insert)

		cache.valueMapping, err = queries.BindMapping(uniprotType, uniprotMapping, insert)
		if err != nil {
			return err
		}
		if len(ret) != 0 {
			cache.retMapping, err = queries.BindMapping(uniprotType, uniprotMapping, ret)
			if err != nil {
				return err
			}
		}
	}

	value := reflect.Indirect(reflect.ValueOf(o))
	vals := queries.ValuesFromMapping(value, cache.valueMapping)
	var returns []interface{}
	if len(cache.retMapping) != 0 {
		returns = queries.PtrsFromMapping(value, cache.retMapping)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, cache.query)
		fmt.Fprintln(writer, vals)
	}
	if len(cache.retMapping) != 0 {
		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
		if errors.Is(err, sql.ErrNoRows) {
			// No row returned means the conflict branch produced nothing to
			// RETURN (e.g. DO NOTHING); treat as success. (The generator's
			// original comment said "Postgres", but this file builds its
			// query with buildUpsertQuerySQLite — same RETURNING semantics.)
			err = nil
		}
	} else {
		_, err = exec.ExecContext(ctx, cache.query, vals...)
	}
	if err != nil {
		return errors.Wrap(err, "models: unable to upsert uniprot")
	}

	if !cached {
		uniprotUpsertCacheMut.Lock()
		uniprotUpsertCache[key] = cache
		uniprotUpsertCacheMut.Unlock()
	}

	return o.doAfterUpsertHooks(ctx, exec)
}

// Delete deletes a single Uniprot record with an executor.
// Delete will match against the primary key column to find the record to delete.
func (o *Uniprot) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if o == nil {
		return 0, errors.New("models: no Uniprot provided for delete")
	}

	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	// Delete strictly by primary key (accession); "?" placeholder per dialect.
	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniprotPrimaryKeyMapping)
	sql := "DELETE FROM \"uniprot\" WHERE \"accession\"=?"

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args...)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete from uniprot")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for uniprot")
	}

	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
		return 0, err
	}

	return rowsAff, nil
}

// DeleteAll deletes all matching rows.
// Note: unlike single-row Delete, the query-based variant runs no
// before/after delete hooks (it never materializes the rows).
func (q uniprotQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if q.Query == nil {
		return 0, errors.New("models: no uniprotQuery provided for delete all")
	}

	queries.SetDelete(q.Query)

	result, err := q.Query.ExecContext(ctx, exec)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from uniprot")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for uniprot")
	}

	return rowsAff, nil
}

// DeleteAll deletes all rows in the slice, using an executor.
func (o UniprotSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
	if len(o) == 0 {
		return 0, nil
	}

	// Run per-object before-delete hooks only if any are registered.
	if len(uniprotBeforeDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	// Collect every row's primary-key values into one argument list.
	var args []interface{}
	for _, obj := range o {
		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), uniprotPrimaryKeyMapping)
		args = append(args, pkeyArgs...)
	}

	// Single DELETE with a repeated primary-key WHERE clause.
	sql := "DELETE FROM \"uniprot\" WHERE " +
		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, uniprotPrimaryKeyColumns, len(o))

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, sql)
		fmt.Fprintln(writer, args)
	}
	result, err := exec.ExecContext(ctx, sql, args...)
	if err != nil {
		return 0, errors.Wrap(err, "models: unable to delete all from uniprot slice")
	}

	rowsAff, err := result.RowsAffected()
	if err != nil {
		return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for uniprot")
	}

	if len(uniprotAfterDeleteHooks) != 0 {
		for _, obj := range o {
			if err := obj.doAfterDeleteHooks(ctx, exec); err != nil {
				return 0, err
			}
		}
	}

	return rowsAff, nil
}

// Reload refetches the object from the database
// using the primary keys with an executor.
// The fetched record replaces *o wholesale, discarding any eager-loaded R.
func (o *Uniprot) Reload(ctx context.Context, exec boil.ContextExecutor) error {
	ret, err := FindUniprot(ctx, exec, o.Accession)
	if err != nil {
		return err
	}

	*o = *ret
	return nil
}

// ReloadAll refetches every row with matching primary key column values
// and overwrites the original object slice with the newly updated slice.
+func (o *UniprotSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := UniprotSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), uniprotPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"uniprot\".* FROM \"uniprot\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, uniprotPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in UniprotSlice") + } + + *o = slice + + return nil +} + +// UniprotExists checks if the Uniprot row exists. +func UniprotExists(ctx context.Context, exec boil.ContextExecutor, accession null.String) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"uniprot\" where \"accession\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, accession) + } + row := exec.QueryRowContext(ctx, sql, accession) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if uniprot exists") + } + + return exists, nil +} diff --git a/models/usan_stems.go b/models/usan_stems.go new file mode 100644 index 0000000..90e5fa1 --- /dev/null +++ b/models/usan_stems.go @@ -0,0 +1,925 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// UsanStem is an object representing the database table. +type UsanStem struct { + UsanStemID int64 `boil:"usan_stem_id" json:"usan_stem_id" toml:"usan_stem_id" yaml:"usan_stem_id"` + Stem string `boil:"stem" json:"stem" toml:"stem" yaml:"stem"` + Subgroup string `boil:"subgroup" json:"subgroup" toml:"subgroup" yaml:"subgroup"` + Annotation null.String `boil:"annotation" json:"annotation,omitempty" toml:"annotation" yaml:"annotation,omitempty"` + StemClass null.String `boil:"stem_class" json:"stem_class,omitempty" toml:"stem_class" yaml:"stem_class,omitempty"` + MajorClass null.String `boil:"major_class" json:"major_class,omitempty" toml:"major_class" yaml:"major_class,omitempty"` + WhoExtra null.Int16 `boil:"who_extra" json:"who_extra,omitempty" toml:"who_extra" yaml:"who_extra,omitempty"` + + R *usanStemR `boil:"-" json:"-" toml:"-" yaml:"-"` + L usanStemL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var UsanStemColumns = struct { + UsanStemID string + Stem string + Subgroup string + Annotation string + StemClass string + MajorClass string + WhoExtra string +}{ + UsanStemID: "usan_stem_id", + Stem: "stem", + Subgroup: "subgroup", + Annotation: "annotation", + StemClass: "stem_class", + MajorClass: "major_class", + WhoExtra: "who_extra", +} + +var UsanStemTableColumns = struct { + UsanStemID string + Stem string + Subgroup string + Annotation string + StemClass string + MajorClass string + WhoExtra string +}{ + UsanStemID: "usan_stems.usan_stem_id", + Stem: "usan_stems.stem", + Subgroup: "usan_stems.subgroup", + Annotation: 
"usan_stems.annotation", + StemClass: "usan_stems.stem_class", + MajorClass: "usan_stems.major_class", + WhoExtra: "usan_stems.who_extra", +} + +// Generated where + +var UsanStemWhere = struct { + UsanStemID whereHelperint64 + Stem whereHelperstring + Subgroup whereHelperstring + Annotation whereHelpernull_String + StemClass whereHelpernull_String + MajorClass whereHelpernull_String + WhoExtra whereHelpernull_Int16 +}{ + UsanStemID: whereHelperint64{field: "\"usan_stems\".\"usan_stem_id\""}, + Stem: whereHelperstring{field: "\"usan_stems\".\"stem\""}, + Subgroup: whereHelperstring{field: "\"usan_stems\".\"subgroup\""}, + Annotation: whereHelpernull_String{field: "\"usan_stems\".\"annotation\""}, + StemClass: whereHelpernull_String{field: "\"usan_stems\".\"stem_class\""}, + MajorClass: whereHelpernull_String{field: "\"usan_stems\".\"major_class\""}, + WhoExtra: whereHelpernull_Int16{field: "\"usan_stems\".\"who_extra\""}, +} + +// UsanStemRels is where relationship names are stored. +var UsanStemRels = struct { +}{} + +// usanStemR is where relationships are stored. +type usanStemR struct { +} + +// NewStruct creates a new relationship struct +func (*usanStemR) NewStruct() *usanStemR { + return &usanStemR{} +} + +// usanStemL is where Load methods for each relationship are stored. +type usanStemL struct{} + +var ( + usanStemAllColumns = []string{"usan_stem_id", "stem", "subgroup", "annotation", "stem_class", "major_class", "who_extra"} + usanStemColumnsWithoutDefault = []string{"usan_stem_id", "stem", "subgroup"} + usanStemColumnsWithDefault = []string{"annotation", "stem_class", "major_class", "who_extra"} + usanStemPrimaryKeyColumns = []string{"usan_stem_id"} + usanStemGeneratedColumns = []string{} +) + +type ( + // UsanStemSlice is an alias for a slice of pointers to UsanStem. + // This should almost always be used instead of []UsanStem. 
+ UsanStemSlice []*UsanStem + // UsanStemHook is the signature for custom UsanStem hook methods + UsanStemHook func(context.Context, boil.ContextExecutor, *UsanStem) error + + usanStemQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + usanStemType = reflect.TypeOf(&UsanStem{}) + usanStemMapping = queries.MakeStructMapping(usanStemType) + usanStemPrimaryKeyMapping, _ = queries.BindMapping(usanStemType, usanStemMapping, usanStemPrimaryKeyColumns) + usanStemInsertCacheMut sync.RWMutex + usanStemInsertCache = make(map[string]insertCache) + usanStemUpdateCacheMut sync.RWMutex + usanStemUpdateCache = make(map[string]updateCache) + usanStemUpsertCacheMut sync.RWMutex + usanStemUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var usanStemAfterSelectHooks []UsanStemHook + +var usanStemBeforeInsertHooks []UsanStemHook +var usanStemAfterInsertHooks []UsanStemHook + +var usanStemBeforeUpdateHooks []UsanStemHook +var usanStemAfterUpdateHooks []UsanStemHook + +var usanStemBeforeDeleteHooks []UsanStemHook +var usanStemAfterDeleteHooks []UsanStemHook + +var usanStemBeforeUpsertHooks []UsanStemHook +var usanStemAfterUpsertHooks []UsanStemHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *UsanStem) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range usanStemAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. 
+func (o *UsanStem) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range usanStemBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *UsanStem) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range usanStemAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *UsanStem) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range usanStemBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *UsanStem) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range usanStemAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *UsanStem) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range usanStemBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. 
+func (o *UsanStem) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range usanStemAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *UsanStem) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range usanStemBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *UsanStem) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range usanStemAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddUsanStemHook registers your hook function for all future operations. 
+func AddUsanStemHook(hookPoint boil.HookPoint, usanStemHook UsanStemHook) { + switch hookPoint { + case boil.AfterSelectHook: + usanStemAfterSelectHooks = append(usanStemAfterSelectHooks, usanStemHook) + case boil.BeforeInsertHook: + usanStemBeforeInsertHooks = append(usanStemBeforeInsertHooks, usanStemHook) + case boil.AfterInsertHook: + usanStemAfterInsertHooks = append(usanStemAfterInsertHooks, usanStemHook) + case boil.BeforeUpdateHook: + usanStemBeforeUpdateHooks = append(usanStemBeforeUpdateHooks, usanStemHook) + case boil.AfterUpdateHook: + usanStemAfterUpdateHooks = append(usanStemAfterUpdateHooks, usanStemHook) + case boil.BeforeDeleteHook: + usanStemBeforeDeleteHooks = append(usanStemBeforeDeleteHooks, usanStemHook) + case boil.AfterDeleteHook: + usanStemAfterDeleteHooks = append(usanStemAfterDeleteHooks, usanStemHook) + case boil.BeforeUpsertHook: + usanStemBeforeUpsertHooks = append(usanStemBeforeUpsertHooks, usanStemHook) + case boil.AfterUpsertHook: + usanStemAfterUpsertHooks = append(usanStemAfterUpsertHooks, usanStemHook) + } +} + +// One returns a single usanStem record from the query. +func (q usanStemQuery) One(ctx context.Context, exec boil.ContextExecutor) (*UsanStem, error) { + o := &UsanStem{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for usan_stems") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all UsanStem records from the query. 
+func (q usanStemQuery) All(ctx context.Context, exec boil.ContextExecutor) (UsanStemSlice, error) { + var o []*UsanStem + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to UsanStem slice") + } + + if len(usanStemAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all UsanStem records in the query. +func (q usanStemQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count usan_stems rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q usanStemQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if usan_stems exists") + } + + return count > 0, nil +} + +// UsanStems retrieves all the records using an executor. +func UsanStems(mods ...qm.QueryMod) usanStemQuery { + mods = append(mods, qm.From("\"usan_stems\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"usan_stems\".*"}) + } + + return usanStemQuery{q} +} + +// FindUsanStem retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindUsanStem(ctx context.Context, exec boil.ContextExecutor, usanStemID int64, selectCols ...string) (*UsanStem, error) { + usanStemObj := &UsanStem{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"usan_stems\" where \"usan_stem_id\"=?", sel, + ) + + q := queries.Raw(query, usanStemID) + + err := q.Bind(ctx, exec, usanStemObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from usan_stems") + } + + if err = usanStemObj.doAfterSelectHooks(ctx, exec); err != nil { + return usanStemObj, err + } + + return usanStemObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *UsanStem) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no usan_stems provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(usanStemColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + usanStemInsertCacheMut.RLock() + cache, cached := usanStemInsertCache[key] + usanStemInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + usanStemAllColumns, + usanStemColumnsWithDefault, + usanStemColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(usanStemType, usanStemMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(usanStemType, usanStemMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"usan_stems\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), 
strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"usan_stems\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into usan_stems") + } + + if !cached { + usanStemInsertCacheMut.Lock() + usanStemInsertCache[key] = cache + usanStemInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the UsanStem. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *UsanStem) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + usanStemUpdateCacheMut.RLock() + cache, cached := usanStemUpdateCache[key] + usanStemUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + usanStemAllColumns, + usanStemPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update usan_stems, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"usan_stems\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, usanStemPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(usanStemType, usanStemMapping, append(wl, usanStemPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update usan_stems row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for usan_stems") + } + + if !cached { + usanStemUpdateCacheMut.Lock() + usanStemUpdateCache[key] = cache + usanStemUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q usanStemQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for usan_stems") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for usan_stems") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o UsanStemSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), usanStemPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"usan_stems\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, usanStemPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in usanStem slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all usanStem") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *UsanStem) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no usan_stems provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(usanStemColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + usanStemUpsertCacheMut.RLock() + cache, cached := usanStemUpsertCache[key] + usanStemUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + usanStemAllColumns, + usanStemColumnsWithDefault, + usanStemColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + usanStemAllColumns, + usanStemPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + 
return errors.New("models: unable to upsert usan_stems, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(usanStemPrimaryKeyColumns)) + copy(conflict, usanStemPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"usan_stems\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(usanStemType, usanStemMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(usanStemType, usanStemMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert usan_stems") + } + + if !cached { + usanStemUpsertCacheMut.Lock() + usanStemUpsertCache[key] = cache + usanStemUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single UsanStem record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *UsanStem) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no UsanStem provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), usanStemPrimaryKeyMapping) + sql := "DELETE FROM \"usan_stems\" WHERE \"usan_stem_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from usan_stems") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for usan_stems") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q usanStemQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no usanStemQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from usan_stems") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for usan_stems") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o UsanStemSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(usanStemBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), usanStemPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"usan_stems\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, usanStemPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from usanStem slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for usan_stems") + } + + if len(usanStemAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *UsanStem) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindUsanStem(ctx, exec, o.UsanStemID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *UsanStemSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := UsanStemSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), usanStemPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"usan_stems\".* FROM \"usan_stems\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, usanStemPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in UsanStemSlice") + } + + *o = slice + + return nil +} + +// UsanStemExists checks if the UsanStem row exists. +func UsanStemExists(ctx context.Context, exec boil.ContextExecutor, usanStemID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"usan_stems\" where \"usan_stem_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, usanStemID) + } + row := exec.QueryRowContext(ctx, sql, usanStemID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if usan_stems exists") + } + + return exists, nil +} diff --git a/models/user.model.go b/models/user.model.go deleted file mode 100644 index bc4eb3f..0000000 --- a/models/user.model.go +++ /dev/null @@ -1,14 +0,0 @@ -package models - -import ( - "time" -) - -type User struct { - // ID uuid.UUID `gorm:"type:uuid;default:uuid_generate_v3();primary_key"` - ID uint `gorm:"primary_key"` - Name string `gorm:"type:varchar(255);not null"` - Email string `gorm:"uniqueIndex;not null"` - CreatedAt time.Time - UpdatedAt time.Time -} diff --git a/models/variant_sequences.go b/models/variant_sequences.go new file mode 100644 index 0000000..b1e5642 --- /dev/null +++ b/models/variant_sequences.go @@ -0,0 
+1,1431 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// VariantSequence is an object representing the database table. +type VariantSequence struct { + VariantID int64 `boil:"variant_id" json:"variant_id" toml:"variant_id" yaml:"variant_id"` + Mutation null.String `boil:"mutation" json:"mutation,omitempty" toml:"mutation" yaml:"mutation,omitempty"` + Accession null.String `boil:"accession" json:"accession,omitempty" toml:"accession" yaml:"accession,omitempty"` + Version null.Int64 `boil:"version" json:"version,omitempty" toml:"version" yaml:"version,omitempty"` + Isoform null.Int64 `boil:"isoform" json:"isoform,omitempty" toml:"isoform" yaml:"isoform,omitempty"` + Sequence null.String `boil:"sequence" json:"sequence,omitempty" toml:"sequence" yaml:"sequence,omitempty"` + Organism null.String `boil:"organism" json:"organism,omitempty" toml:"organism" yaml:"organism,omitempty"` + TaxID null.Int64 `boil:"tax_id" json:"tax_id,omitempty" toml:"tax_id" yaml:"tax_id,omitempty"` + + R *variantSequenceR `boil:"-" json:"-" toml:"-" yaml:"-"` + L variantSequenceL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var VariantSequenceColumns = struct { + VariantID string + Mutation string + Accession string + Version string + Isoform string + Sequence string + Organism string + TaxID string +}{ + VariantID: "variant_id", + Mutation: "mutation", + Accession: "accession", + Version: "version", + Isoform: "isoform", + 
Sequence: "sequence", + Organism: "organism", + TaxID: "tax_id", +} + +var VariantSequenceTableColumns = struct { + VariantID string + Mutation string + Accession string + Version string + Isoform string + Sequence string + Organism string + TaxID string +}{ + VariantID: "variant_sequences.variant_id", + Mutation: "variant_sequences.mutation", + Accession: "variant_sequences.accession", + Version: "variant_sequences.version", + Isoform: "variant_sequences.isoform", + Sequence: "variant_sequences.sequence", + Organism: "variant_sequences.organism", + TaxID: "variant_sequences.tax_id", +} + +// Generated where + +var VariantSequenceWhere = struct { + VariantID whereHelperint64 + Mutation whereHelpernull_String + Accession whereHelpernull_String + Version whereHelpernull_Int64 + Isoform whereHelpernull_Int64 + Sequence whereHelpernull_String + Organism whereHelpernull_String + TaxID whereHelpernull_Int64 +}{ + VariantID: whereHelperint64{field: "\"variant_sequences\".\"variant_id\""}, + Mutation: whereHelpernull_String{field: "\"variant_sequences\".\"mutation\""}, + Accession: whereHelpernull_String{field: "\"variant_sequences\".\"accession\""}, + Version: whereHelpernull_Int64{field: "\"variant_sequences\".\"version\""}, + Isoform: whereHelpernull_Int64{field: "\"variant_sequences\".\"isoform\""}, + Sequence: whereHelpernull_String{field: "\"variant_sequences\".\"sequence\""}, + Organism: whereHelpernull_String{field: "\"variant_sequences\".\"organism\""}, + TaxID: whereHelpernull_Int64{field: "\"variant_sequences\".\"tax_id\""}, +} + +// VariantSequenceRels is where relationship names are stored. +var VariantSequenceRels = struct { + VariantAssays string + VariantDrugMechanisms string +}{ + VariantAssays: "VariantAssays", + VariantDrugMechanisms: "VariantDrugMechanisms", +} + +// variantSequenceR is where relationships are stored. 
+type variantSequenceR struct { + VariantAssays AssaySlice `boil:"VariantAssays" json:"VariantAssays" toml:"VariantAssays" yaml:"VariantAssays"` + VariantDrugMechanisms DrugMechanismSlice `boil:"VariantDrugMechanisms" json:"VariantDrugMechanisms" toml:"VariantDrugMechanisms" yaml:"VariantDrugMechanisms"` +} + +// NewStruct creates a new relationship struct +func (*variantSequenceR) NewStruct() *variantSequenceR { + return &variantSequenceR{} +} + +func (r *variantSequenceR) GetVariantAssays() AssaySlice { + if r == nil { + return nil + } + return r.VariantAssays +} + +func (r *variantSequenceR) GetVariantDrugMechanisms() DrugMechanismSlice { + if r == nil { + return nil + } + return r.VariantDrugMechanisms +} + +// variantSequenceL is where Load methods for each relationship are stored. +type variantSequenceL struct{} + +var ( + variantSequenceAllColumns = []string{"variant_id", "mutation", "accession", "version", "isoform", "sequence", "organism", "tax_id"} + variantSequenceColumnsWithoutDefault = []string{"variant_id"} + variantSequenceColumnsWithDefault = []string{"mutation", "accession", "version", "isoform", "sequence", "organism", "tax_id"} + variantSequencePrimaryKeyColumns = []string{"variant_id"} + variantSequenceGeneratedColumns = []string{} +) + +type ( + // VariantSequenceSlice is an alias for a slice of pointers to VariantSequence. + // This should almost always be used instead of []VariantSequence. 
+ VariantSequenceSlice []*VariantSequence + // VariantSequenceHook is the signature for custom VariantSequence hook methods + VariantSequenceHook func(context.Context, boil.ContextExecutor, *VariantSequence) error + + variantSequenceQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + variantSequenceType = reflect.TypeOf(&VariantSequence{}) + variantSequenceMapping = queries.MakeStructMapping(variantSequenceType) + variantSequencePrimaryKeyMapping, _ = queries.BindMapping(variantSequenceType, variantSequenceMapping, variantSequencePrimaryKeyColumns) + variantSequenceInsertCacheMut sync.RWMutex + variantSequenceInsertCache = make(map[string]insertCache) + variantSequenceUpdateCacheMut sync.RWMutex + variantSequenceUpdateCache = make(map[string]updateCache) + variantSequenceUpsertCacheMut sync.RWMutex + variantSequenceUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var variantSequenceAfterSelectHooks []VariantSequenceHook + +var variantSequenceBeforeInsertHooks []VariantSequenceHook +var variantSequenceAfterInsertHooks []VariantSequenceHook + +var variantSequenceBeforeUpdateHooks []VariantSequenceHook +var variantSequenceAfterUpdateHooks []VariantSequenceHook + +var variantSequenceBeforeDeleteHooks []VariantSequenceHook +var variantSequenceAfterDeleteHooks []VariantSequenceHook + +var variantSequenceBeforeUpsertHooks []VariantSequenceHook +var variantSequenceAfterUpsertHooks []VariantSequenceHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+// Each do*Hooks method below runs the registered hooks for one hook point,
+// in registration order, stopping at the first error. All of them are
+// no-ops when hooks are skipped via the context (boil.HooksAreSkipped).
+func (o *VariantSequence) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range variantSequenceAfterSelectHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doBeforeInsertHooks executes all "before insert" hooks.
+func (o *VariantSequence) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range variantSequenceBeforeInsertHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doAfterInsertHooks executes all "after Insert" hooks.
+func (o *VariantSequence) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range variantSequenceAfterInsertHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doBeforeUpdateHooks executes all "before Update" hooks.
+func (o *VariantSequence) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range variantSequenceBeforeUpdateHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doAfterUpdateHooks executes all "after Update" hooks.
+func (o *VariantSequence) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range variantSequenceAfterUpdateHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doBeforeDeleteHooks executes all "before Delete" hooks.
+func (o *VariantSequence) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range variantSequenceBeforeDeleteHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doAfterDeleteHooks executes all "after Delete" hooks.
+func (o *VariantSequence) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range variantSequenceAfterDeleteHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doBeforeUpsertHooks executes all "before Upsert" hooks.
+func (o *VariantSequence) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range variantSequenceBeforeUpsertHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// doAfterUpsertHooks executes all "after Upsert" hooks.
+func (o *VariantSequence) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {
+	if boil.HooksAreSkipped(ctx) {
+		return nil
+	}
+
+	for _, hook := range variantSequenceAfterUpsertHooks {
+		if err := hook(ctx, exec, o); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// AddVariantSequenceHook registers your hook function for all future operations.
+// AddVariantSequenceHook registers your hook function for all future operations.
+// Registration appends to a package-level slice without synchronization, so
+// hooks should be added during program initialization, not concurrently.
+func AddVariantSequenceHook(hookPoint boil.HookPoint, variantSequenceHook VariantSequenceHook) {
+	switch hookPoint {
+	case boil.AfterSelectHook:
+		variantSequenceAfterSelectHooks = append(variantSequenceAfterSelectHooks, variantSequenceHook)
+	case boil.BeforeInsertHook:
+		variantSequenceBeforeInsertHooks = append(variantSequenceBeforeInsertHooks, variantSequenceHook)
+	case boil.AfterInsertHook:
+		variantSequenceAfterInsertHooks = append(variantSequenceAfterInsertHooks, variantSequenceHook)
+	case boil.BeforeUpdateHook:
+		variantSequenceBeforeUpdateHooks = append(variantSequenceBeforeUpdateHooks, variantSequenceHook)
+	case boil.AfterUpdateHook:
+		variantSequenceAfterUpdateHooks = append(variantSequenceAfterUpdateHooks, variantSequenceHook)
+	case boil.BeforeDeleteHook:
+		variantSequenceBeforeDeleteHooks = append(variantSequenceBeforeDeleteHooks, variantSequenceHook)
+	case boil.AfterDeleteHook:
+		variantSequenceAfterDeleteHooks = append(variantSequenceAfterDeleteHooks, variantSequenceHook)
+	case boil.BeforeUpsertHook:
+		variantSequenceBeforeUpsertHooks = append(variantSequenceBeforeUpsertHooks, variantSequenceHook)
+	case boil.AfterUpsertHook:
+		variantSequenceAfterUpsertHooks = append(variantSequenceAfterUpsertHooks, variantSequenceHook)
+	}
+}
+
+// One returns a single variantSequence record from the query.
+// Returns sql.ErrNoRows (unwrapped) when no record matches, so callers can
+// test with errors.Is(err, sql.ErrNoRows).
+func (q variantSequenceQuery) One(ctx context.Context, exec boil.ContextExecutor) (*VariantSequence, error) {
+	o := &VariantSequence{}
+
+	queries.SetLimit(q.Query, 1)
+
+	err := q.Bind(ctx, exec, o)
+	if err != nil {
+		if errors.Is(err, sql.ErrNoRows) {
+			return nil, sql.ErrNoRows
+		}
+		return nil, errors.Wrap(err, "models: failed to execute a one query for variant_sequences")
+	}
+
+	if err := o.doAfterSelectHooks(ctx, exec); err != nil {
+		return o, err
+	}
+
+	return o, nil
+}
+
+// All returns all VariantSequence records from the query.
+func (q variantSequenceQuery) All(ctx context.Context, exec boil.ContextExecutor) (VariantSequenceSlice, error) {
+	var o []*VariantSequence
+
+	err := q.Bind(ctx, exec, &o)
+	if err != nil {
+		return nil, errors.Wrap(err, "models: failed to assign all query results to VariantSequence slice")
+	}
+
+	// Only walk the slice when at least one after-select hook is registered.
+	if len(variantSequenceAfterSelectHooks) != 0 {
+		for _, obj := range o {
+			if err := obj.doAfterSelectHooks(ctx, exec); err != nil {
+				return o, err
+			}
+		}
+	}
+
+	return o, nil
+}
+
+// Count returns the count of all VariantSequence records in the query.
+func (q variantSequenceQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
+	var count int64
+
+	queries.SetSelect(q.Query, nil)
+	queries.SetCount(q.Query)
+
+	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
+	if err != nil {
+		return 0, errors.Wrap(err, "models: failed to count variant_sequences rows")
+	}
+
+	return count, nil
+}
+
+// Exists checks if the row exists in the table.
+// Implemented as a COUNT with LIMIT 1 so the database can stop at the
+// first matching row.
+func (q variantSequenceQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) {
+	var count int64
+
+	queries.SetSelect(q.Query, nil)
+	queries.SetCount(q.Query)
+	queries.SetLimit(q.Query, 1)
+
+	err := q.Query.QueryRowContext(ctx, exec).Scan(&count)
+	if err != nil {
+		return false, errors.Wrap(err, "models: failed to check if variant_sequences exists")
+	}
+
+	return count > 0, nil
+}
+
+// VariantAssays retrieves all the assay's Assays with an executor via variant_id column.
+func (o *VariantSequence) VariantAssays(mods ...qm.QueryMod) assayQuery {
+	var queryMods []qm.QueryMod
+	if len(mods) != 0 {
+		queryMods = append(queryMods, mods...)
+	}
+
+	queryMods = append(queryMods,
+		qm.Where("\"assays\".\"variant_id\"=?", o.VariantID),
+	)
+
+	return Assays(queryMods...)
+}
+
+// VariantDrugMechanisms retrieves all the drug_mechanism's DrugMechanisms with an executor via variant_id column.
+// VariantDrugMechanisms retrieves all the drug_mechanism's DrugMechanisms with an executor via variant_id column.
+func (o *VariantSequence) VariantDrugMechanisms(mods ...qm.QueryMod) drugMechanismQuery {
+	var queryMods []qm.QueryMod
+	if len(mods) != 0 {
+		queryMods = append(queryMods, mods...)
+	}
+
+	queryMods = append(queryMods,
+		qm.Where("\"drug_mechanism\".\"variant_id\"=?", o.VariantID),
+	)
+
+	return DrugMechanisms(queryMods...)
+}
+
+// LoadVariantAssays allows an eager lookup of values, cached into the
+// loaded structs of the objects. This is for a 1-M or N-M relationship.
+// maybeVariantSequence is either *VariantSequence (singular) or
+// *[]*VariantSequence; results are attached to each object's R struct and
+// back-pointers (foreign.R.Variant) are set both ways.
+func (variantSequenceL) LoadVariantAssays(ctx context.Context, e boil.ContextExecutor, singular bool, maybeVariantSequence interface{}, mods queries.Applicator) error {
+	var slice []*VariantSequence
+	var object *VariantSequence
+
+	if singular {
+		object = maybeVariantSequence.(*VariantSequence)
+	} else {
+		slice = *maybeVariantSequence.(*[]*VariantSequence)
+	}
+
+	// Collect the distinct variant_id values to query with (dedup via
+	// queries.Equal so null-aware types compare correctly).
+	args := make([]interface{}, 0, 1)
+	if singular {
+		if object.R == nil {
+			object.R = &variantSequenceR{}
+		}
+		args = append(args, object.VariantID)
+	} else {
+	Outer:
+		for _, obj := range slice {
+			if obj.R == nil {
+				obj.R = &variantSequenceR{}
+			}
+
+			for _, a := range args {
+				if queries.Equal(a, obj.VariantID) {
+					continue Outer
+				}
+			}
+
+			args = append(args, obj.VariantID)
+		}
+	}
+
+	if len(args) == 0 {
+		return nil
+	}
+
+	query := NewQuery(
+		qm.From(`assays`),
+		qm.WhereIn(`assays.variant_id in ?`, args...),
+	)
+	if mods != nil {
+		mods.Apply(query)
+	}
+
+	results, err := query.QueryContext(ctx, e)
+	if err != nil {
+		return errors.Wrap(err, "failed to eager load assays")
+	}
+
+	var resultSlice []*Assay
+	if err = queries.Bind(results, &resultSlice); err != nil {
+		return errors.Wrap(err, "failed to bind eager loaded slice assays")
+	}
+
+	if err = results.Close(); err != nil {
+		return errors.Wrap(err, "failed to close results in eager load on assays")
+	}
+	if err = results.Err(); err != nil {
+		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for assays")
+	}
+
+	if len(assayAfterSelectHooks) != 0 {
+		for _, obj := range resultSlice {
+			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
+				return err
+			}
+		}
+	}
+	if singular {
+		object.R.VariantAssays = resultSlice
+		for _, foreign := range resultSlice {
+			if foreign.R == nil {
+				foreign.R = &assayR{}
+			}
+			foreign.R.Variant = object
+		}
+		return nil
+	}
+
+	// Plural case: match each foreign row back to its parent by variant_id.
+	for _, foreign := range resultSlice {
+		for _, local := range slice {
+			if queries.Equal(local.VariantID, foreign.VariantID) {
+				local.R.VariantAssays = append(local.R.VariantAssays, foreign)
+				if foreign.R == nil {
+					foreign.R = &assayR{}
+				}
+				foreign.R.Variant = local
+				break
+			}
+		}
+	}
+
+	return nil
+}
+
+// LoadVariantDrugMechanisms allows an eager lookup of values, cached into the
+// loaded structs of the objects. This is for a 1-M or N-M relationship.
+// Mirrors LoadVariantAssays for the drug_mechanism table.
+func (variantSequenceL) LoadVariantDrugMechanisms(ctx context.Context, e boil.ContextExecutor, singular bool, maybeVariantSequence interface{}, mods queries.Applicator) error {
+	var slice []*VariantSequence
+	var object *VariantSequence
+
+	if singular {
+		object = maybeVariantSequence.(*VariantSequence)
+	} else {
+		slice = *maybeVariantSequence.(*[]*VariantSequence)
+	}
+
+	args := make([]interface{}, 0, 1)
+	if singular {
+		if object.R == nil {
+			object.R = &variantSequenceR{}
+		}
+		args = append(args, object.VariantID)
+	} else {
+	Outer:
+		for _, obj := range slice {
+			if obj.R == nil {
+				obj.R = &variantSequenceR{}
+			}
+
+			for _, a := range args {
+				if queries.Equal(a, obj.VariantID) {
+					continue Outer
+				}
+			}
+
+			args = append(args, obj.VariantID)
+		}
+	}
+
+	if len(args) == 0 {
+		return nil
+	}
+
+	query := NewQuery(
+		qm.From(`drug_mechanism`),
+		qm.WhereIn(`drug_mechanism.variant_id in ?`, args...),
+	)
+	if mods != nil {
+		mods.Apply(query)
+	}
+
+	results, err := query.QueryContext(ctx, e)
+	if err != nil {
+		return errors.Wrap(err, "failed to eager load drug_mechanism")
+	}
+
+	var resultSlice []*DrugMechanism
+	if err = queries.Bind(results, &resultSlice); err != nil {
+		return errors.Wrap(err, "failed to bind eager loaded slice drug_mechanism")
+	}
+
+	if err = results.Close(); err != nil {
+		return errors.Wrap(err, "failed to close results in eager load on drug_mechanism")
+	}
+	if err = results.Err(); err != nil {
+		return errors.Wrap(err, "error occurred during iteration of eager loaded relations for drug_mechanism")
+	}
+
+	if len(drugMechanismAfterSelectHooks) != 0 {
+		for _, obj := range resultSlice {
+			if err := obj.doAfterSelectHooks(ctx, e); err != nil {
+				return err
+			}
+		}
+	}
+	if singular {
+		object.R.VariantDrugMechanisms = resultSlice
+		for _, foreign := range resultSlice {
+			if foreign.R == nil {
+				foreign.R = &drugMechanismR{}
+			}
+			foreign.R.Variant = object
+		}
+		return nil
+	}
+
+	for _, foreign := range resultSlice {
+		for _, local := range slice {
+			if queries.Equal(local.VariantID, foreign.VariantID) {
+				local.R.VariantDrugMechanisms = append(local.R.VariantDrugMechanisms, foreign)
+				if foreign.R == nil {
+					foreign.R = &drugMechanismR{}
+				}
+				foreign.R.Variant = local
+				break
+			}
+		}
+	}
+
+	return nil
+}
+
+// AddVariantAssays adds the given related objects to the existing relationships
+// of the variant_sequence, optionally inserting them as new records.
+// Appends related to o.R.VariantAssays.
+// Sets related.R.Variant appropriately.
+// AddVariantAssays adds the given related objects to the existing relationships
+// of the variant_sequence, optionally inserting them as new records.
+// Appends related to o.R.VariantAssays.
+// Sets related.R.Variant appropriately.
+func (o *VariantSequence) AddVariantAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error {
+	var err error
+	for _, rel := range related {
+		if insert {
+			// New record: stamp the FK first, then insert.
+			queries.Assign(&rel.VariantID, o.VariantID)
+			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
+				return errors.Wrap(err, "failed to insert into foreign table")
+			}
+		} else {
+			// Existing record: update its variant_id in the database,
+			// then mirror the change on the in-memory struct.
+			updateQuery := fmt.Sprintf(
+				"UPDATE \"assays\" SET %s WHERE %s",
+				strmangle.SetParamNames("\"", "\"", 0, []string{"variant_id"}),
+				strmangle.WhereClause("\"", "\"", 0, assayPrimaryKeyColumns),
+			)
+			values := []interface{}{o.VariantID, rel.AssayID}
+
+			if boil.IsDebug(ctx) {
+				writer := boil.DebugWriterFrom(ctx)
+				fmt.Fprintln(writer, updateQuery)
+				fmt.Fprintln(writer, values)
+			}
+			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
+				return errors.Wrap(err, "failed to update foreign table")
+			}
+
+			queries.Assign(&rel.VariantID, o.VariantID)
+		}
+	}
+
+	if o.R == nil {
+		o.R = &variantSequenceR{
+			VariantAssays: related,
+		}
+	} else {
+		o.R.VariantAssays = append(o.R.VariantAssays, related...)
+	}
+
+	for _, rel := range related {
+		if rel.R == nil {
+			rel.R = &assayR{
+				Variant: o,
+			}
+		} else {
+			rel.R.Variant = o
+		}
+	}
+	return nil
+}
+
+// SetVariantAssays removes all previously related items of the
+// variant_sequence replacing them completely with the passed
+// in related items, optionally inserting them as new records.
+// Sets o.R.Variant's VariantAssays accordingly.
+// Replaces o.R.VariantAssays with related.
+// Sets related.R.Variant's VariantAssays accordingly.
+// Detaches existing children by nulling their variant_id, so this requires
+// the FK column to be nullable.
+func (o *VariantSequence) SetVariantAssays(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Assay) error {
+	query := "update \"assays\" set \"variant_id\" = null where \"variant_id\" = ?"
+	values := []interface{}{o.VariantID}
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, query)
+		fmt.Fprintln(writer, values)
+	}
+	_, err := exec.ExecContext(ctx, query, values...)
+	if err != nil {
+		return errors.Wrap(err, "failed to remove relationships before set")
+	}
+
+	if o.R != nil {
+		for _, rel := range o.R.VariantAssays {
+			queries.SetScanner(&rel.VariantID, nil)
+			if rel.R == nil {
+				continue
+			}
+
+			rel.R.Variant = nil
+		}
+		o.R.VariantAssays = nil
+	}
+
+	return o.AddVariantAssays(ctx, exec, insert, related...)
+}
+
+// RemoveVariantAssays relationships from objects passed in.
+// Removes related items from R.VariantAssays (uses pointer comparison, removal does not keep order)
+// Sets related.R.Variant.
+func (o *VariantSequence) RemoveVariantAssays(ctx context.Context, exec boil.ContextExecutor, related ...*Assay) error {
+	if len(related) == 0 {
+		return nil
+	}
+
+	var err error
+	for _, rel := range related {
+		queries.SetScanner(&rel.VariantID, nil)
+		if rel.R != nil {
+			rel.R.Variant = nil
+		}
+		if _, err = rel.Update(ctx, exec, boil.Whitelist("variant_id")); err != nil {
+			return err
+		}
+	}
+	if o.R == nil {
+		return nil
+	}
+
+	for _, rel := range related {
+		for i, ri := range o.R.VariantAssays {
+			if rel != ri {
+				continue
+			}
+
+			// Swap-remove: move the last element into slot i (order not kept).
+			ln := len(o.R.VariantAssays)
+			if ln > 1 && i < ln-1 {
+				o.R.VariantAssays[i] = o.R.VariantAssays[ln-1]
+			}
+			o.R.VariantAssays = o.R.VariantAssays[:ln-1]
+			break
+		}
+	}
+
+	return nil
+}
+
+// AddVariantDrugMechanisms adds the given related objects to the existing relationships
+// of the variant_sequence, optionally inserting them as new records.
+// Appends related to o.R.VariantDrugMechanisms.
+// Sets related.R.Variant appropriately.
+// AddVariantDrugMechanisms adds the given related objects to the existing relationships
+// of the variant_sequence, optionally inserting them as new records.
+// Appends related to o.R.VariantDrugMechanisms.
+// Sets related.R.Variant appropriately.
+// Mirrors AddVariantAssays for the drug_mechanism table.
+func (o *VariantSequence) AddVariantDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugMechanism) error {
+	var err error
+	for _, rel := range related {
+		if insert {
+			queries.Assign(&rel.VariantID, o.VariantID)
+			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
+				return errors.Wrap(err, "failed to insert into foreign table")
+			}
+		} else {
+			updateQuery := fmt.Sprintf(
+				"UPDATE \"drug_mechanism\" SET %s WHERE %s",
+				strmangle.SetParamNames("\"", "\"", 0, []string{"variant_id"}),
+				strmangle.WhereClause("\"", "\"", 0, drugMechanismPrimaryKeyColumns),
+			)
+			values := []interface{}{o.VariantID, rel.MecID}
+
+			if boil.IsDebug(ctx) {
+				writer := boil.DebugWriterFrom(ctx)
+				fmt.Fprintln(writer, updateQuery)
+				fmt.Fprintln(writer, values)
+			}
+			if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
+				return errors.Wrap(err, "failed to update foreign table")
+			}
+
+			queries.Assign(&rel.VariantID, o.VariantID)
+		}
+	}
+
+	if o.R == nil {
+		o.R = &variantSequenceR{
+			VariantDrugMechanisms: related,
+		}
+	} else {
+		o.R.VariantDrugMechanisms = append(o.R.VariantDrugMechanisms, related...)
+	}
+
+	for _, rel := range related {
+		if rel.R == nil {
+			rel.R = &drugMechanismR{
+				Variant: o,
+			}
+		} else {
+			rel.R.Variant = o
+		}
+	}
+	return nil
+}
+
+// SetVariantDrugMechanisms removes all previously related items of the
+// variant_sequence replacing them completely with the passed
+// in related items, optionally inserting them as new records.
+// Sets o.R.Variant's VariantDrugMechanisms accordingly.
+// Replaces o.R.VariantDrugMechanisms with related.
+// Sets related.R.Variant's VariantDrugMechanisms accordingly.
+// Detaches existing children by nulling their variant_id, so this requires
+// the FK column to be nullable.
+func (o *VariantSequence) SetVariantDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*DrugMechanism) error {
+	query := "update \"drug_mechanism\" set \"variant_id\" = null where \"variant_id\" = ?"
+	values := []interface{}{o.VariantID}
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, query)
+		fmt.Fprintln(writer, values)
+	}
+	_, err := exec.ExecContext(ctx, query, values...)
+	if err != nil {
+		return errors.Wrap(err, "failed to remove relationships before set")
+	}
+
+	if o.R != nil {
+		for _, rel := range o.R.VariantDrugMechanisms {
+			queries.SetScanner(&rel.VariantID, nil)
+			if rel.R == nil {
+				continue
+			}
+
+			rel.R.Variant = nil
+		}
+		o.R.VariantDrugMechanisms = nil
+	}
+
+	return o.AddVariantDrugMechanisms(ctx, exec, insert, related...)
+}
+
+// RemoveVariantDrugMechanisms relationships from objects passed in.
+// Removes related items from R.VariantDrugMechanisms (uses pointer comparison, removal does not keep order)
+// Sets related.R.Variant.
+func (o *VariantSequence) RemoveVariantDrugMechanisms(ctx context.Context, exec boil.ContextExecutor, related ...*DrugMechanism) error {
+	if len(related) == 0 {
+		return nil
+	}
+
+	var err error
+	for _, rel := range related {
+		queries.SetScanner(&rel.VariantID, nil)
+		if rel.R != nil {
+			rel.R.Variant = nil
+		}
+		if _, err = rel.Update(ctx, exec, boil.Whitelist("variant_id")); err != nil {
+			return err
+		}
+	}
+	if o.R == nil {
+		return nil
+	}
+
+	for _, rel := range related {
+		for i, ri := range o.R.VariantDrugMechanisms {
+			if rel != ri {
+				continue
+			}
+
+			// Swap-remove: move the last element into slot i (order not kept).
+			ln := len(o.R.VariantDrugMechanisms)
+			if ln > 1 && i < ln-1 {
+				o.R.VariantDrugMechanisms[i] = o.R.VariantDrugMechanisms[ln-1]
+			}
+			o.R.VariantDrugMechanisms = o.R.VariantDrugMechanisms[:ln-1]
+			break
+		}
+	}
+
+	return nil
+}
+
+// VariantSequences retrieves all the records using an executor.
+func VariantSequences(mods ...qm.QueryMod) variantSequenceQuery {
+	mods = append(mods, qm.From("\"variant_sequences\""))
+	q := NewQuery(mods...)
+	// Default the select list to the full row unless the caller set one.
+	if len(queries.GetSelect(q)) == 0 {
+		queries.SetSelect(q, []string{"\"variant_sequences\".*"})
+	}
+
+	return variantSequenceQuery{q}
+}
+
+// FindVariantSequence retrieves a single record by ID with an executor.
+// If selectCols is empty Find will return all columns.
+// Returns sql.ErrNoRows (unwrapped) when the ID does not exist.
+func FindVariantSequence(ctx context.Context, exec boil.ContextExecutor, variantID int64, selectCols ...string) (*VariantSequence, error) {
+	variantSequenceObj := &VariantSequence{}
+
+	sel := "*"
+	if len(selectCols) > 0 {
+		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
+	}
+	query := fmt.Sprintf(
+		"select %s from \"variant_sequences\" where \"variant_id\"=?", sel,
+	)
+
+	q := queries.Raw(query, variantID)
+
+	err := q.Bind(ctx, exec, variantSequenceObj)
+	if err != nil {
+		if errors.Is(err, sql.ErrNoRows) {
+			return nil, sql.ErrNoRows
+		}
+		return nil, errors.Wrap(err, "models: unable to select from variant_sequences")
+	}
+
+	if err = variantSequenceObj.doAfterSelectHooks(ctx, exec); err != nil {
+		return variantSequenceObj, err
+	}
+
+	return variantSequenceObj, nil
+}
+
+// Insert a single record using an executor.
+// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
+// Insert a single record using an executor.
+// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
+// Built statements are memoized per column set in variantSequenceInsertCache.
+func (o *VariantSequence) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error {
+	if o == nil {
+		return errors.New("models: no variant_sequences provided for insertion")
+	}
+
+	var err error
+
+	if err := o.doBeforeInsertHooks(ctx, exec); err != nil {
+		return err
+	}
+
+	nzDefaults := queries.NonZeroDefaultSet(variantSequenceColumnsWithDefault, o)
+
+	key := makeCacheKey(columns, nzDefaults)
+	variantSequenceInsertCacheMut.RLock()
+	cache, cached := variantSequenceInsertCache[key]
+	variantSequenceInsertCacheMut.RUnlock()
+
+	if !cached {
+		wl, returnColumns := columns.InsertColumnSet(
+			variantSequenceAllColumns,
+			variantSequenceColumnsWithDefault,
+			variantSequenceColumnsWithoutDefault,
+			nzDefaults,
+		)
+
+		cache.valueMapping, err = queries.BindMapping(variantSequenceType, variantSequenceMapping, wl)
+		if err != nil {
+			return err
+		}
+		cache.retMapping, err = queries.BindMapping(variantSequenceType, variantSequenceMapping, returnColumns)
+		if err != nil {
+			return err
+		}
+		if len(wl) != 0 {
+			cache.query = fmt.Sprintf("INSERT INTO \"variant_sequences\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1))
+		} else {
+			cache.query = "INSERT INTO \"variant_sequences\" %sDEFAULT VALUES%s"
+		}
+
+		var queryOutput, queryReturning string
+
+		if len(cache.retMapping) != 0 {
+			queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\""))
+		}
+
+		cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
+	}
+
+	value := reflect.Indirect(reflect.ValueOf(o))
+	vals := queries.ValuesFromMapping(value, cache.valueMapping)
+
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, cache.query)
+		fmt.Fprintln(writer, vals)
+	}
+
+	// With a RETURNING clause, scan database-generated values back into o.
+	if len(cache.retMapping) != 0 {
+		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
+	} else {
+		_, err = exec.ExecContext(ctx, cache.query, vals...)
+	}
+
+	if err != nil {
+		return errors.Wrap(err, "models: unable to insert into variant_sequences")
+	}
+
+	if !cached {
+		variantSequenceInsertCacheMut.Lock()
+		variantSequenceInsertCache[key] = cache
+		variantSequenceInsertCacheMut.Unlock()
+	}
+
+	return o.doAfterInsertHooks(ctx, exec)
+}
+
+// Update uses an executor to update the VariantSequence.
+// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates.
+// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records.
+func (o *VariantSequence) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) {
+	var err error
+	if err = o.doBeforeUpdateHooks(ctx, exec); err != nil {
+		return 0, err
+	}
+	key := makeCacheKey(columns, nil)
+	variantSequenceUpdateCacheMut.RLock()
+	cache, cached := variantSequenceUpdateCache[key]
+	variantSequenceUpdateCacheMut.RUnlock()
+
+	if !cached {
+		wl := columns.UpdateColumnSet(
+			variantSequenceAllColumns,
+			variantSequencePrimaryKeyColumns,
+		)
+
+		// created_at is only updatable through an explicit whitelist.
+		if !columns.IsWhitelist() {
+			wl = strmangle.SetComplement(wl, []string{"created_at"})
+		}
+		if len(wl) == 0 {
+			return 0, errors.New("models: unable to update variant_sequences, could not build whitelist")
+		}
+
+		cache.query = fmt.Sprintf("UPDATE \"variant_sequences\" SET %s WHERE %s",
+			strmangle.SetParamNames("\"", "\"", 0, wl),
+			strmangle.WhereClause("\"", "\"", 0, variantSequencePrimaryKeyColumns),
+		)
+		cache.valueMapping, err = queries.BindMapping(variantSequenceType, variantSequenceMapping, append(wl, variantSequencePrimaryKeyColumns...))
+		if err != nil {
+			return 0, err
+		}
+	}
+
+	values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)
+
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, cache.query)
+		fmt.Fprintln(writer, values)
+	}
+	var result sql.Result
+	result, err = exec.ExecContext(ctx, cache.query, values...)
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to update variant_sequences row")
+	}
+
+	rowsAff, err := result.RowsAffected()
+	if err != nil {
+		return 0, errors.Wrap(err, "models: failed to get rows affected by update for variant_sequences")
+	}
+
+	if !cached {
+		variantSequenceUpdateCacheMut.Lock()
+		variantSequenceUpdateCache[key] = cache
+		variantSequenceUpdateCacheMut.Unlock()
+	}
+
+	return rowsAff, o.doAfterUpdateHooks(ctx, exec)
+}
+
+// UpdateAll updates all rows with the specified column values.
+func (q variantSequenceQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
+	queries.SetUpdate(q.Query, cols)
+
+	result, err := q.Query.ExecContext(ctx, exec)
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to update all for variant_sequences")
+	}
+
+	rowsAff, err := result.RowsAffected()
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to retrieve rows affected for variant_sequences")
+	}
+
+	return rowsAff, nil
+}
+
+// UpdateAll updates all rows with the specified column values, using an executor.
+// Note: update/delete hooks are not run for slice-level UpdateAll.
+func (o VariantSequenceSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) {
+	ln := int64(len(o))
+	if ln == 0 {
+		return 0, nil
+	}
+
+	if len(cols) == 0 {
+		return 0, errors.New("models: update all requires at least one column argument")
+	}
+
+	colNames := make([]string, len(cols))
+	args := make([]interface{}, len(cols))
+
+	i := 0
+	for name, value := range cols {
+		colNames[i] = name
+		args[i] = value
+		i++
+	}
+
+	// Append all of the primary key values for each column
+	for _, obj := range o {
+		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), variantSequencePrimaryKeyMapping)
+		args = append(args, pkeyArgs...)
+	}
+
+	sql := fmt.Sprintf("UPDATE \"variant_sequences\" SET %s WHERE %s",
+		strmangle.SetParamNames("\"", "\"", 0, colNames),
+		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, variantSequencePrimaryKeyColumns, len(o)))
+
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, sql)
+		fmt.Fprintln(writer, args...)
+	}
+	result, err := exec.ExecContext(ctx, sql, args...)
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to update all in variantSequence slice")
+	}
+
+	rowsAff, err := result.RowsAffected()
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all variantSequence")
+	}
+	return rowsAff, nil
+}
+
+// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
+// See boil.Columns documentation for how to properly use updateColumns and insertColumns.
+// The conflict target defaults to the primary key when conflictColumns is empty.
+func (o *VariantSequence) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
+	if o == nil {
+		return errors.New("models: no variant_sequences provided for upsert")
+	}
+
+	if err := o.doBeforeUpsertHooks(ctx, exec); err != nil {
+		return err
+	}
+
+	nzDefaults := queries.NonZeroDefaultSet(variantSequenceColumnsWithDefault, o)
+
+	// Build cache key in-line uglily - mysql vs psql problems
+	buf := strmangle.GetBuffer()
+	if updateOnConflict {
+		buf.WriteByte('t')
+	} else {
+		buf.WriteByte('f')
+	}
+	buf.WriteByte('.')
+	for _, c := range conflictColumns {
+		buf.WriteString(c)
+	}
+	buf.WriteByte('.')
+	buf.WriteString(strconv.Itoa(updateColumns.Kind))
+	for _, c := range updateColumns.Cols {
+		buf.WriteString(c)
+	}
+	buf.WriteByte('.')
+	buf.WriteString(strconv.Itoa(insertColumns.Kind))
+	for _, c := range insertColumns.Cols {
+		buf.WriteString(c)
+	}
+	buf.WriteByte('.')
+	for _, c := range nzDefaults {
+		buf.WriteString(c)
+	}
+	key := buf.String()
+	strmangle.PutBuffer(buf)
+
+	variantSequenceUpsertCacheMut.RLock()
+	cache, cached := variantSequenceUpsertCache[key]
+	variantSequenceUpsertCacheMut.RUnlock()
+
+	var err error
+
+	if !cached {
+		insert, ret := insertColumns.InsertColumnSet(
+			variantSequenceAllColumns,
+			variantSequenceColumnsWithDefault,
+			variantSequenceColumnsWithoutDefault,
+			nzDefaults,
+		)
+		update := updateColumns.UpdateColumnSet(
+			variantSequenceAllColumns,
+			variantSequencePrimaryKeyColumns,
+		)
+
+		if updateOnConflict && len(update) == 0 {
+			return errors.New("models: unable to upsert variant_sequences, could not build update column list")
+		}
+
+		conflict := conflictColumns
+		if len(conflict) == 0 {
+			conflict = make([]string, len(variantSequencePrimaryKeyColumns))
+			copy(conflict, variantSequencePrimaryKeyColumns)
+		}
+		// SQLite dialect: ON CONFLICT ... DO UPDATE/NOTHING.
+		cache.query = buildUpsertQuerySQLite(dialect, "\"variant_sequences\"", updateOnConflict, ret, update, conflict, insert)
+
+		cache.valueMapping, err = queries.BindMapping(variantSequenceType, variantSequenceMapping, insert)
+		if err != nil {
+			return err
+		}
+		if len(ret) != 0 {
+			cache.retMapping, err = queries.BindMapping(variantSequenceType, variantSequenceMapping, ret)
+			if err != nil {
+				return err
+			}
+		}
+	}
+
+	value := reflect.Indirect(reflect.ValueOf(o))
+	vals := queries.ValuesFromMapping(value, cache.valueMapping)
+	var returns []interface{}
+	if len(cache.retMapping) != 0 {
+		returns = queries.PtrsFromMapping(value, cache.retMapping)
+	}
+
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, cache.query)
+		fmt.Fprintln(writer, vals)
+	}
+	if len(cache.retMapping) != 0 {
+		err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)
+		if errors.Is(err, sql.ErrNoRows) {
+			// NOTE(review): generator comment said "Postgres doesn't return
+			// anything when there's no update", but this query is built by
+			// buildUpsertQuerySQLite; the same no-row-on-conflict-skip
+			// behavior is treated as success here.
+			err = nil
+		}
+	} else {
+		_, err = exec.ExecContext(ctx, cache.query, vals...)
+	}
+	if err != nil {
+		return errors.Wrap(err, "models: unable to upsert variant_sequences")
+	}
+
+	if !cached {
+		variantSequenceUpsertCacheMut.Lock()
+		variantSequenceUpsertCache[key] = cache
+		variantSequenceUpsertCacheMut.Unlock()
+	}
+
+	return o.doAfterUpsertHooks(ctx, exec)
+}
+
+// Delete deletes a single VariantSequence record with an executor.
+// Delete will match against the primary key column to find the record to delete.
+func (o *VariantSequence) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {
+	if o == nil {
+		return 0, errors.New("models: no VariantSequence provided for delete")
+	}
+
+	if err := o.doBeforeDeleteHooks(ctx, exec); err != nil {
+		return 0, err
+	}
+
+	args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), variantSequencePrimaryKeyMapping)
+	sql := "DELETE FROM \"variant_sequences\" WHERE \"variant_id\"=?"
+
+	if boil.IsDebug(ctx) {
+		writer := boil.DebugWriterFrom(ctx)
+		fmt.Fprintln(writer, sql)
+		fmt.Fprintln(writer, args...)
+	}
+	result, err := exec.ExecContext(ctx, sql, args...)
+	if err != nil {
+		return 0, errors.Wrap(err, "models: unable to delete from variant_sequences")
+	}
+
+	rowsAff, err := result.RowsAffected()
+	if err != nil {
+		return 0, errors.Wrap(err, "models: failed to get rows affected by delete for variant_sequences")
+	}
+
+	if err := o.doAfterDeleteHooks(ctx, exec); err != nil {
+		return 0, err
+	}
+
+	return rowsAff, nil
+}
+
+// DeleteAll deletes all matching rows.
+func (q variantSequenceQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no variantSequenceQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from variant_sequences") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for variant_sequences") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o VariantSequenceSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(variantSequenceBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), variantSequencePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"variant_sequences\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, variantSequencePrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from variantSequence slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for variant_sequences") + } + + if len(variantSequenceAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *VariantSequence) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindVariantSequence(ctx, exec, o.VariantID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *VariantSequenceSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := VariantSequenceSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), variantSequencePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"variant_sequences\".* FROM \"variant_sequences\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, variantSequencePrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in VariantSequenceSlice") + } + + *o = slice + + return nil +} + +// VariantSequenceExists checks if the VariantSequence row exists. 
+func VariantSequenceExists(ctx context.Context, exec boil.ContextExecutor, variantID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"variant_sequences\" where \"variant_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, variantID) + } + row := exec.QueryRowContext(ctx, sql, variantID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if variant_sequences exists") + } + + return exists, nil +} diff --git a/models/version.go b/models/version.go new file mode 100644 index 0000000..7bf1386 --- /dev/null +++ b/models/version.go @@ -0,0 +1,897 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Version is an object representing the database table. 
+type Version struct { + Name string `boil:"name" json:"name" toml:"name" yaml:"name"` + CreationDate null.Time `boil:"creation_date" json:"creation_date,omitempty" toml:"creation_date" yaml:"creation_date,omitempty"` + Comments null.String `boil:"comments" json:"comments,omitempty" toml:"comments" yaml:"comments,omitempty"` + + R *versionR `boil:"-" json:"-" toml:"-" yaml:"-"` + L versionL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var VersionColumns = struct { + Name string + CreationDate string + Comments string +}{ + Name: "name", + CreationDate: "creation_date", + Comments: "comments", +} + +var VersionTableColumns = struct { + Name string + CreationDate string + Comments string +}{ + Name: "version.name", + CreationDate: "version.creation_date", + Comments: "version.comments", +} + +// Generated where + +var VersionWhere = struct { + Name whereHelperstring + CreationDate whereHelpernull_Time + Comments whereHelpernull_String +}{ + Name: whereHelperstring{field: "\"version\".\"name\""}, + CreationDate: whereHelpernull_Time{field: "\"version\".\"creation_date\""}, + Comments: whereHelpernull_String{field: "\"version\".\"comments\""}, +} + +// VersionRels is where relationship names are stored. +var VersionRels = struct { +}{} + +// versionR is where relationships are stored. +type versionR struct { +} + +// NewStruct creates a new relationship struct +func (*versionR) NewStruct() *versionR { + return &versionR{} +} + +// versionL is where Load methods for each relationship are stored. +type versionL struct{} + +var ( + versionAllColumns = []string{"name", "creation_date", "comments"} + versionColumnsWithoutDefault = []string{"name"} + versionColumnsWithDefault = []string{"creation_date", "comments"} + versionPrimaryKeyColumns = []string{"name"} + versionGeneratedColumns = []string{} +) + +type ( + // VersionSlice is an alias for a slice of pointers to Version. + // This should almost always be used instead of []Version. 
+ VersionSlice []*Version + // VersionHook is the signature for custom Version hook methods + VersionHook func(context.Context, boil.ContextExecutor, *Version) error + + versionQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + versionType = reflect.TypeOf(&Version{}) + versionMapping = queries.MakeStructMapping(versionType) + versionPrimaryKeyMapping, _ = queries.BindMapping(versionType, versionMapping, versionPrimaryKeyColumns) + versionInsertCacheMut sync.RWMutex + versionInsertCache = make(map[string]insertCache) + versionUpdateCacheMut sync.RWMutex + versionUpdateCache = make(map[string]updateCache) + versionUpsertCacheMut sync.RWMutex + versionUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var versionAfterSelectHooks []VersionHook + +var versionBeforeInsertHooks []VersionHook +var versionAfterInsertHooks []VersionHook + +var versionBeforeUpdateHooks []VersionHook +var versionAfterUpdateHooks []VersionHook + +var versionBeforeDeleteHooks []VersionHook +var versionAfterDeleteHooks []VersionHook + +var versionBeforeUpsertHooks []VersionHook +var versionAfterUpsertHooks []VersionHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *Version) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range versionAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. 
+func (o *Version) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range versionBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Version) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range versionAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *Version) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range versionBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Version) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range versionAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Version) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range versionBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. 
+func (o *Version) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range versionAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Version) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range versionBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *Version) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range versionAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddVersionHook registers your hook function for all future operations. 
+func AddVersionHook(hookPoint boil.HookPoint, versionHook VersionHook) { + switch hookPoint { + case boil.AfterSelectHook: + versionAfterSelectHooks = append(versionAfterSelectHooks, versionHook) + case boil.BeforeInsertHook: + versionBeforeInsertHooks = append(versionBeforeInsertHooks, versionHook) + case boil.AfterInsertHook: + versionAfterInsertHooks = append(versionAfterInsertHooks, versionHook) + case boil.BeforeUpdateHook: + versionBeforeUpdateHooks = append(versionBeforeUpdateHooks, versionHook) + case boil.AfterUpdateHook: + versionAfterUpdateHooks = append(versionAfterUpdateHooks, versionHook) + case boil.BeforeDeleteHook: + versionBeforeDeleteHooks = append(versionBeforeDeleteHooks, versionHook) + case boil.AfterDeleteHook: + versionAfterDeleteHooks = append(versionAfterDeleteHooks, versionHook) + case boil.BeforeUpsertHook: + versionBeforeUpsertHooks = append(versionBeforeUpsertHooks, versionHook) + case boil.AfterUpsertHook: + versionAfterUpsertHooks = append(versionAfterUpsertHooks, versionHook) + } +} + +// One returns a single version record from the query. +func (q versionQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Version, error) { + o := &Version{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for version") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Version records from the query. 
+func (q versionQuery) All(ctx context.Context, exec boil.ContextExecutor) (VersionSlice, error) { + var o []*Version + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to Version slice") + } + + if len(versionAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Version records in the query. +func (q versionQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count version rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q versionQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if version exists") + } + + return count > 0, nil +} + +// Versions retrieves all the records using an executor. +func Versions(mods ...qm.QueryMod) versionQuery { + mods = append(mods, qm.From("\"version\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"version\".*"}) + } + + return versionQuery{q} +} + +// FindVersion retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindVersion(ctx context.Context, exec boil.ContextExecutor, name string, selectCols ...string) (*Version, error) { + versionObj := &Version{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"version\" where \"name\"=?", sel, + ) + + q := queries.Raw(query, name) + + err := q.Bind(ctx, exec, versionObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from version") + } + + if err = versionObj.doAfterSelectHooks(ctx, exec); err != nil { + return versionObj, err + } + + return versionObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *Version) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no version provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(versionColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + versionInsertCacheMut.RLock() + cache, cached := versionInsertCache[key] + versionInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + versionAllColumns, + versionColumnsWithDefault, + versionColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(versionType, versionMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(versionType, versionMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"version\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + 
} else { + cache.query = "INSERT INTO \"version\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into version") + } + + if !cached { + versionInsertCacheMut.Lock() + versionInsertCache[key] = cache + versionInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the Version. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *Version) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + versionUpdateCacheMut.RLock() + cache, cached := versionUpdateCache[key] + versionUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + versionAllColumns, + versionPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update version, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"version\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, versionPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(versionType, versionMapping, append(wl, versionPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update version row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for version") + } + + if !cached { + versionUpdateCacheMut.Lock() + versionUpdateCache[key] = cache + versionUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q versionQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for version") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for version") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o VersionSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), versionPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"version\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, versionPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in version slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all version") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Version) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no version provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(versionColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + versionUpsertCacheMut.RLock() + cache, cached := versionUpsertCache[key] + versionUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + versionAllColumns, + versionColumnsWithDefault, + versionColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + versionAllColumns, + versionPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return 
errors.New("models: unable to upsert version, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(versionPrimaryKeyColumns)) + copy(conflict, versionPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"version\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(versionType, versionMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(versionType, versionMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert version") + } + + if !cached { + versionUpsertCacheMut.Lock() + versionUpsertCache[key] = cache + versionUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Version record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *Version) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no Version provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), versionPrimaryKeyMapping) + sql := "DELETE FROM \"version\" WHERE \"name\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from version") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for version") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q versionQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no versionQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from version") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for version") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o VersionSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(versionBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), versionPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"version\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, versionPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from version slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for version") + } + + if len(versionAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *Version) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindVersion(ctx, exec, o.Name) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *VersionSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := VersionSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), versionPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"version\".* FROM \"version\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, versionPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in VersionSlice") + } + + *o = slice + + return nil +} + +// VersionExists checks if the Version row exists. +func VersionExists(ctx context.Context, exec boil.ContextExecutor, name string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"version\" where \"name\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, name) + } + row := exec.QueryRowContext(ctx, sql, name) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if version exists") + } + + return exists, nil +} diff --git a/models/warning_refs.go b/models/warning_refs.go new file mode 100644 index 0000000..722a68c --- /dev/null +++ b/models/warning_refs.go @@ -0,0 +1,1121 @@ +// Code generated by SQLBoiler 4.11.0 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// WarningRef is an object representing the database table. +type WarningRef struct { + WarnrefID int64 `boil:"warnref_id" json:"warnref_id" toml:"warnref_id" yaml:"warnref_id"` + WarningID null.Int64 `boil:"warning_id" json:"warning_id,omitempty" toml:"warning_id" yaml:"warning_id,omitempty"` + RefType null.String `boil:"ref_type" json:"ref_type,omitempty" toml:"ref_type" yaml:"ref_type,omitempty"` + RefID null.String `boil:"ref_id" json:"ref_id,omitempty" toml:"ref_id" yaml:"ref_id,omitempty"` + RefURL null.String `boil:"ref_url" json:"ref_url,omitempty" toml:"ref_url" yaml:"ref_url,omitempty"` + + R *warningRefR `boil:"-" json:"-" toml:"-" yaml:"-"` + L warningRefL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var WarningRefColumns = struct { + WarnrefID string + WarningID string + RefType string + RefID string + RefURL string +}{ + WarnrefID: "warnref_id", + WarningID: "warning_id", + RefType: "ref_type", + RefID: "ref_id", + RefURL: "ref_url", +} + +var WarningRefTableColumns = struct { + WarnrefID string + WarningID string + RefType string + RefID string + RefURL string +}{ + WarnrefID: "warning_refs.warnref_id", + WarningID: "warning_refs.warning_id", + RefType: "warning_refs.ref_type", + RefID: "warning_refs.ref_id", + RefURL: "warning_refs.ref_url", +} + +// Generated where + +var WarningRefWhere = struct { + WarnrefID whereHelperint64 + WarningID whereHelpernull_Int64 + RefType whereHelpernull_String + RefID whereHelpernull_String + RefURL whereHelpernull_String +}{ + WarnrefID: whereHelperint64{field: 
"\"warning_refs\".\"warnref_id\""}, + WarningID: whereHelpernull_Int64{field: "\"warning_refs\".\"warning_id\""}, + RefType: whereHelpernull_String{field: "\"warning_refs\".\"ref_type\""}, + RefID: whereHelpernull_String{field: "\"warning_refs\".\"ref_id\""}, + RefURL: whereHelpernull_String{field: "\"warning_refs\".\"ref_url\""}, +} + +// WarningRefRels is where relationship names are stored. +var WarningRefRels = struct { + Warning string +}{ + Warning: "Warning", +} + +// warningRefR is where relationships are stored. +type warningRefR struct { + Warning *DrugWarning `boil:"Warning" json:"Warning" toml:"Warning" yaml:"Warning"` +} + +// NewStruct creates a new relationship struct +func (*warningRefR) NewStruct() *warningRefR { + return &warningRefR{} +} + +func (r *warningRefR) GetWarning() *DrugWarning { + if r == nil { + return nil + } + return r.Warning +} + +// warningRefL is where Load methods for each relationship are stored. +type warningRefL struct{} + +var ( + warningRefAllColumns = []string{"warnref_id", "warning_id", "ref_type", "ref_id", "ref_url"} + warningRefColumnsWithoutDefault = []string{"warnref_id"} + warningRefColumnsWithDefault = []string{"warning_id", "ref_type", "ref_id", "ref_url"} + warningRefPrimaryKeyColumns = []string{"warnref_id"} + warningRefGeneratedColumns = []string{} +) + +type ( + // WarningRefSlice is an alias for a slice of pointers to WarningRef. + // This should almost always be used instead of []WarningRef. 
+ WarningRefSlice []*WarningRef + // WarningRefHook is the signature for custom WarningRef hook methods + WarningRefHook func(context.Context, boil.ContextExecutor, *WarningRef) error + + warningRefQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + warningRefType = reflect.TypeOf(&WarningRef{}) + warningRefMapping = queries.MakeStructMapping(warningRefType) + warningRefPrimaryKeyMapping, _ = queries.BindMapping(warningRefType, warningRefMapping, warningRefPrimaryKeyColumns) + warningRefInsertCacheMut sync.RWMutex + warningRefInsertCache = make(map[string]insertCache) + warningRefUpdateCacheMut sync.RWMutex + warningRefUpdateCache = make(map[string]updateCache) + warningRefUpsertCacheMut sync.RWMutex + warningRefUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var warningRefAfterSelectHooks []WarningRefHook + +var warningRefBeforeInsertHooks []WarningRefHook +var warningRefAfterInsertHooks []WarningRefHook + +var warningRefBeforeUpdateHooks []WarningRefHook +var warningRefAfterUpdateHooks []WarningRefHook + +var warningRefBeforeDeleteHooks []WarningRefHook +var warningRefAfterDeleteHooks []WarningRefHook + +var warningRefBeforeUpsertHooks []WarningRefHook +var warningRefAfterUpsertHooks []WarningRefHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *WarningRef) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range warningRefAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. 
+func (o *WarningRef) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range warningRefBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *WarningRef) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range warningRefAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *WarningRef) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range warningRefBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *WarningRef) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range warningRefAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *WarningRef) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range warningRefBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. 
+func (o *WarningRef) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range warningRefAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *WarningRef) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range warningRefBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *WarningRef) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range warningRefAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddWarningRefHook registers your hook function for all future operations. 
+func AddWarningRefHook(hookPoint boil.HookPoint, warningRefHook WarningRefHook) { + switch hookPoint { + case boil.AfterSelectHook: + warningRefAfterSelectHooks = append(warningRefAfterSelectHooks, warningRefHook) + case boil.BeforeInsertHook: + warningRefBeforeInsertHooks = append(warningRefBeforeInsertHooks, warningRefHook) + case boil.AfterInsertHook: + warningRefAfterInsertHooks = append(warningRefAfterInsertHooks, warningRefHook) + case boil.BeforeUpdateHook: + warningRefBeforeUpdateHooks = append(warningRefBeforeUpdateHooks, warningRefHook) + case boil.AfterUpdateHook: + warningRefAfterUpdateHooks = append(warningRefAfterUpdateHooks, warningRefHook) + case boil.BeforeDeleteHook: + warningRefBeforeDeleteHooks = append(warningRefBeforeDeleteHooks, warningRefHook) + case boil.AfterDeleteHook: + warningRefAfterDeleteHooks = append(warningRefAfterDeleteHooks, warningRefHook) + case boil.BeforeUpsertHook: + warningRefBeforeUpsertHooks = append(warningRefBeforeUpsertHooks, warningRefHook) + case boil.AfterUpsertHook: + warningRefAfterUpsertHooks = append(warningRefAfterUpsertHooks, warningRefHook) + } +} + +// One returns a single warningRef record from the query. +func (q warningRefQuery) One(ctx context.Context, exec boil.ContextExecutor) (*WarningRef, error) { + o := &WarningRef{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: failed to execute a one query for warning_refs") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all WarningRef records from the query. 
+func (q warningRefQuery) All(ctx context.Context, exec boil.ContextExecutor) (WarningRefSlice, error) { + var o []*WarningRef + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "models: failed to assign all query results to WarningRef slice") + } + + if len(warningRefAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all WarningRef records in the query. +func (q warningRefQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "models: failed to count warning_refs rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q warningRefQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "models: failed to check if warning_refs exists") + } + + return count > 0, nil +} + +// Warning pointed to by the foreign key. +func (o *WarningRef) Warning(mods ...qm.QueryMod) drugWarningQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"warning_id\" = ?", o.WarningID), + } + + queryMods = append(queryMods, mods...) + + return DrugWarnings(queryMods...) +} + +// LoadWarning allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (warningRefL) LoadWarning(ctx context.Context, e boil.ContextExecutor, singular bool, maybeWarningRef interface{}, mods queries.Applicator) error { + var slice []*WarningRef + var object *WarningRef + + if singular { + object = maybeWarningRef.(*WarningRef) + } else { + slice = *maybeWarningRef.(*[]*WarningRef) + } + + args := make([]interface{}, 0, 1) + if singular { + if object.R == nil { + object.R = &warningRefR{} + } + if !queries.IsNil(object.WarningID) { + args = append(args, object.WarningID) + } + + } else { + Outer: + for _, obj := range slice { + if obj.R == nil { + obj.R = &warningRefR{} + } + + for _, a := range args { + if queries.Equal(a, obj.WarningID) { + continue Outer + } + } + + if !queries.IsNil(obj.WarningID) { + args = append(args, obj.WarningID) + } + + } + } + + if len(args) == 0 { + return nil + } + + query := NewQuery( + qm.From(`drug_warning`), + qm.WhereIn(`drug_warning.warning_id in ?`, args...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load DrugWarning") + } + + var resultSlice []*DrugWarning + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice DrugWarning") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for drug_warning") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for drug_warning") + } + + if len(warningRefAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Warning = foreign + if foreign.R == nil { + foreign.R = &drugWarningR{} + } + foreign.R.WarningWarningRefs = append(foreign.R.WarningWarningRefs, object) 
+ return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if queries.Equal(local.WarningID, foreign.WarningID) { + local.R.Warning = foreign + if foreign.R == nil { + foreign.R = &drugWarningR{} + } + foreign.R.WarningWarningRefs = append(foreign.R.WarningWarningRefs, local) + break + } + } + } + + return nil +} + +// SetWarning of the warningRef to the related item. +// Sets o.R.Warning to related. +// Adds o to related.R.WarningWarningRefs. +func (o *WarningRef) SetWarning(ctx context.Context, exec boil.ContextExecutor, insert bool, related *DrugWarning) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"warning_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, []string{"warning_id"}), + strmangle.WhereClause("\"", "\"", 0, warningRefPrimaryKeyColumns), + ) + values := []interface{}{related.WarningID, o.WarnrefID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + queries.Assign(&o.WarningID, related.WarningID) + if o.R == nil { + o.R = &warningRefR{ + Warning: related, + } + } else { + o.R.Warning = related + } + + if related.R == nil { + related.R = &drugWarningR{ + WarningWarningRefs: WarningRefSlice{o}, + } + } else { + related.R.WarningWarningRefs = append(related.R.WarningWarningRefs, o) + } + + return nil +} + +// RemoveWarning relationship. +// Sets o.R.Warning to nil. +// Removes o from all passed in related items' relationships struct. 
+func (o *WarningRef) RemoveWarning(ctx context.Context, exec boil.ContextExecutor, related *DrugWarning) error { + var err error + + queries.SetScanner(&o.WarningID, nil) + if _, err = o.Update(ctx, exec, boil.Whitelist("warning_id")); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + if o.R != nil { + o.R.Warning = nil + } + if related == nil || related.R == nil { + return nil + } + + for i, ri := range related.R.WarningWarningRefs { + if queries.Equal(o.WarningID, ri.WarningID) { + continue + } + + ln := len(related.R.WarningWarningRefs) + if ln > 1 && i < ln-1 { + related.R.WarningWarningRefs[i] = related.R.WarningWarningRefs[ln-1] + } + related.R.WarningWarningRefs = related.R.WarningWarningRefs[:ln-1] + break + } + return nil +} + +// WarningRefs retrieves all the records using an executor. +func WarningRefs(mods ...qm.QueryMod) warningRefQuery { + mods = append(mods, qm.From("\"warning_refs\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"warning_refs\".*"}) + } + + return warningRefQuery{q} +} + +// FindWarningRef retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindWarningRef(ctx context.Context, exec boil.ContextExecutor, warnrefID int64, selectCols ...string) (*WarningRef, error) { + warningRefObj := &WarningRef{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"warning_refs\" where \"warnref_id\"=?", sel, + ) + + q := queries.Raw(query, warnrefID) + + err := q.Bind(ctx, exec, warningRefObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "models: unable to select from warning_refs") + } + + if err = warningRefObj.doAfterSelectHooks(ctx, exec); err != nil { + return warningRefObj, err + } + + return warningRefObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *WarningRef) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("models: no warning_refs provided for insertion") + } + + var err error + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(warningRefColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + warningRefInsertCacheMut.RLock() + cache, cached := warningRefInsertCache[key] + warningRefInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + warningRefAllColumns, + warningRefColumnsWithDefault, + warningRefColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(warningRefType, warningRefMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(warningRefType, warningRefMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"warning_refs\" (\"%s\") %%sVALUES (%s)%%s", 
strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"warning_refs\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "models: unable to insert into warning_refs") + } + + if !cached { + warningRefInsertCacheMut.Lock() + warningRefInsertCache[key] = cache + warningRefInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the WarningRef. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *WarningRef) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + warningRefUpdateCacheMut.RLock() + cache, cached := warningRefUpdateCache[key] + warningRefUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + warningRefAllColumns, + warningRefPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("models: unable to update warning_refs, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"warning_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, wl), + strmangle.WhereClause("\"", "\"", 0, warningRefPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(warningRefType, warningRefMapping, append(wl, warningRefPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update warning_refs row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by update for warning_refs") + } + + if !cached { + warningRefUpdateCacheMut.Lock() + warningRefUpdateCache[key] = cache + warningRefUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q warningRefQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to update all for warning_refs") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected for warning_refs") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o WarningRefSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("models: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), warningRefPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"warning_refs\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 0, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, warningRefPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "models: unable to update all in warningRef slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: unable to retrieve rows affected all in update all warningRef") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *WarningRef) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error { + if o == nil { + return errors.New("models: no warning_refs provided for upsert") + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(warningRefColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + warningRefUpsertCacheMut.RLock() + cache, cached := warningRefUpsertCache[key] + warningRefUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, ret := insertColumns.InsertColumnSet( + warningRefAllColumns, + warningRefColumnsWithDefault, + warningRefColumnsWithoutDefault, + nzDefaults, + ) + update := updateColumns.UpdateColumnSet( + warningRefAllColumns, + warningRefPrimaryKeyColumns, + ) + + if 
updateOnConflict && len(update) == 0 { + return errors.New("models: unable to upsert warning_refs, could not build update column list") + } + + conflict := conflictColumns + if len(conflict) == 0 { + conflict = make([]string, len(warningRefPrimaryKeyColumns)) + copy(conflict, warningRefPrimaryKeyColumns) + } + cache.query = buildUpsertQuerySQLite(dialect, "\"warning_refs\"", updateOnConflict, ret, update, conflict, insert) + + cache.valueMapping, err = queries.BindMapping(warningRefType, warningRefMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(warningRefType, warningRefMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "models: unable to upsert warning_refs") + } + + if !cached { + warningRefUpsertCacheMut.Lock() + warningRefUpsertCache[key] = cache + warningRefUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single WarningRef record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *WarningRef) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("models: no WarningRef provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), warningRefPrimaryKeyMapping) + sql := "DELETE FROM \"warning_refs\" WHERE \"warnref_id\"=?" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete from warning_refs") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by delete for warning_refs") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q warningRefQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("models: no warningRefQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from warning_refs") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for warning_refs") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o WarningRefSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(warningRefBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), warningRefPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"warning_refs\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, warningRefPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "models: unable to delete all from warningRef slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "models: failed to get rows affected by deleteall for warning_refs") + } + + if len(warningRefAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *WarningRef) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindWarningRef(ctx, exec, o.WarnrefID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *WarningRefSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := WarningRefSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), warningRefPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"warning_refs\".* FROM \"warning_refs\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, warningRefPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "models: unable to reload all in WarningRefSlice") + } + + *o = slice + + return nil +} + +// WarningRefExists checks if the WarningRef row exists. +func WarningRefExists(ctx context.Context, exec boil.ContextExecutor, warnrefID int64) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"warning_refs\" where \"warnref_id\"=? limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, warnrefID) + } + row := exec.QueryRowContext(ctx, sql, warnrefID) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "models: unable to check if warning_refs exists") + } + + return exists, nil +} diff --git a/pkg/config/config.go b/pkg/config/config.go new file mode 100644 index 0000000..b59ce27 --- /dev/null +++ b/pkg/config/config.go @@ -0,0 +1,119 @@ +package config + +import ( + "encoding/json" + "os" + "path/filepath" + + "github.com/bebop/ark/pkg/env" +) + +// Read reads the config file and returns the config. 
+func Read(fileName string) (Config, error) { + configPath := filepath.Join(env.RootPath(), fileName) + configFile, err := os.Open(configPath) + defer configFile.Close() + if err != nil { + return Config{}, err + } + + var config Config + json.NewDecoder(configFile).Decode(&config) + return config, nil +} + +// Write writes the config to the given file. +func Write(fileName string, config Config) error { + configPath := filepath.Join(env.RootPath(), fileName) + configFile, _ := os.Create(configPath) + defer configFile.Close() + + json.NewEncoder(configFile).Encode(config) + return nil +} + +// Config is the configuration for the ark application. +type Config struct { + // Production or development. + IsProd bool `json:"prod"` + + // RootPath is the root directory of the project. + RootPath string `json:"root_path"` + + // DataPath is the data directory of the project. + DataPath string `json:"data_path"` + + // ArkPath is the path to the ark sqlite database. + ArkPath string `json:"ark_path"` + + // RheaRDF is the path to the Rhea RDF file. + RheaRDF string `json:"rhea_rdf"` + + // RheaToUniprotSprot is the path to the Rhea to Uniprot Sprot mapping file. + RheaToUniprotSprot string `json:"rhea_to_uniprot_sprot"` + + // RheaToUniprotTrembl is the path to the Rhea to Uniprot Trembl mapping file. + RheaToUniprotTrembl string `json:"rhea_to_uniprot_trembl"` + + // ChemblSchema is the path to the Chembl schema file. + ChemblSchema string `json:"chembl_schema"` + + // ChemblSQLite is the path to the CHEMBL sqlite file. + ChemblSQLite string `json:"chembl_sqlite"` + + // UniprotSprotXML is the path to the Uniprot Sprot XML file. + UniprotSprotXML string `json:"uniprot_sprot_xml"` + + // UniprotTremblXML is the path to the Uniprot Trembl XML file. + UniprotTremblXML string `json:"uniprot_trembl_xml"` + + // Genbank is the path to the Genbank directory. + Genbank string `json:"genbank"` +} + +// DevDefault returns the default configuration for development. 
+func DevDefault() Config { + + devPath := filepath.Join(env.RootPath(), "data", "dev") + chemblSchemaPath := filepath.Join(env.RootPath(), "data", "chembl_schema.sql") + return Config{ + IsProd: false, + RootPath: env.RootPath(), + DataPath: devPath, + ArkPath: filepath.Join(devPath, "ark.sqlite"), + RheaRDF: filepath.Join(devPath, "rhea_mini.rdf.gz"), + RheaToUniprotSprot: filepath.Join(devPath, "rhea_to_uniprot_sprot.tsv.gz"), + RheaToUniprotTrembl: filepath.Join(devPath, "rhea_to_uniprot_trembl.tsv.gz"), + ChemblSchema: chemblSchemaPath, + ChemblSQLite: filepath.Join(devPath, "chembl.sqlite"), + UniprotSprotXML: filepath.Join(devPath, "uniprot_sprot_test.xml.gz"), + UniprotTremblXML: filepath.Join(devPath, "uniprot_sprot_test.xml.gz"), + Genbank: filepath.Join(devPath, "genbank"), + } +} + +// ProdDefault returns the default configuration for production. +func ProdDefault() Config { + prodPath := filepath.Join(env.RootPath(), "data", "prod") + chemblSchemaPath := filepath.Join(env.RootPath(), "data", "chembl_schema.sql") + + return Config{ + IsProd: true, + RootPath: env.RootPath(), + DataPath: prodPath, + ArkPath: filepath.Join(prodPath, "ark.sqlite"), + RheaRDF: filepath.Join(prodPath, "rhea.rdf.gz"), + RheaToUniprotSprot: filepath.Join(prodPath, "rhea_to_uniprot_sprot.tsv"), + RheaToUniprotTrembl: filepath.Join(prodPath, "rhea_to_uniprot_trembl.tsv"), + ChemblSchema: chemblSchemaPath, + ChemblSQLite: filepath.Join(prodPath, "chembl.sqlite"), + UniprotSprotXML: filepath.Join(prodPath, "uniprot_sprot.xml.gz"), + UniprotTremblXML: filepath.Join(prodPath, "uniprot_trembl.xml.gz"), + Genbank: filepath.Join(prodPath, "genbank"), + } +} + +// TestDefault returns the default configuration for testing. 
+func TestDefault() Config { + return DevDefault() +} diff --git a/pkg/download/chembl.go b/pkg/download/chembl.go new file mode 100644 index 0000000..a057d08 --- /dev/null +++ b/pkg/download/chembl.go @@ -0,0 +1,50 @@ +package download + +import ( + "log" + "net/http" + "strings" +) + +// Chembl checks the latest release for Chembl, downloads and unpacks their sqlite release tarball and saves it to disk write path. +func Chembl(writePath string) { + links, err := Links("https://ftp.ebi.ac.uk/pub/databases/chembl/ChEMBLdb/latest/") + + if err != nil { + log.Fatal(err) + } + + var sqliteFileLink string + + // find the sqlite file link + for _, link := range links { + // if it's a sqlite tarball save its link + if strings.Contains(link, "sqlite.tar.gz") { + sqliteFileLink = link + break + } + } + + // if we didn't find it, bail. + if sqliteFileLink == "" { + log.Fatal("could not find sqlite file link") + } + + // get the tarball from the server that contains the sqlite file + response, err := http.Get(sqliteFileLink) + if err != nil { + log.Fatal(err) + } + defer response.Body.Close() + + // if server ain't good, bail + if response.StatusCode != 200 { + log.Fatalf("status code error: %d %s", response.StatusCode, response.Status) + } + + // extract our sqlite file from the tarball and write to disk + err = Tarball(response.Body, ".db", writePath) + if err != nil { + log.Fatal(err) + } +} diff --git a/pkg/download/download.go b/pkg/download/download.go new file mode 100644 index 0000000..2bf0569 --- /dev/null +++ b/pkg/download/download.go @@ -0,0 +1,32 @@ +package download + +// ark literally downloads all the base data needed to build a standard ark deployment +// the amount of data is dummy high to casually test on your personal machine. Run at your own risk. 
+func ark() { + writePath := "../data/build" + + // Typically I'd write these functions to return errors but since I'm using go routines + // the blocking nature of using channels to report errors would either make the + // concurrency of go routines moot or make it so the returned errors were not returned until + // all of the go routines were done which in this case kind of makes reporting errors a bit useless. + + // The solution here is that all of the functions called by the go routines will just log fatal errors. + + // I suppose it may be of some use to report when go routines are finished for the user's sake but that isn't a priority for + // this pull request. + + // get Rhea - ~300MB total. + go Rhea(writePath) + + // get CHEMBL Sqlite file - ~300MB compressed. + go Chembl(writePath) + + // get curated sprot uniprot - ~1GB compressed. + go File("https://ftp.uniprot.org/pub/databases/uniprot/current_release/knowledgebase/complete/uniprot_sprot.xml.gz", writePath) + + // get chaotic trembl uniprot - ~160GB compressed. + go File("https://ftp.uniprot.org/pub/databases/uniprot/current_release/knowledgebase/complete/uniprot_trembl.xml.gz", writePath) + + // gets all of annotated genbank - Not sure how big it is as of writing this but it's a lot. + go Genbank(writePath) +} diff --git a/pkg/download/file.go b/pkg/download/file.go new file mode 100644 index 0000000..b442c68 --- /dev/null +++ b/pkg/download/file.go @@ -0,0 +1,53 @@ +package download + +import ( + "io" + "log" + "net/http" + "net/url" + "os" + "path/filepath" +) + +// File downloads the file at the specified url and saves it to the specified writePath. 
// File downloads the file at fileURL and saves it under writePath, using the
// final path segment of the URL as the local file name. Failures terminate
// the process via log.Fatal, matching the rest of this package.
func File(fileURL string, writePath string) {
	// get the file from the server
	response, err := http.Get(fileURL)
	if err != nil {
		log.Fatal(err)
	}
	defer response.Body.Close()

	// if server ain't good, bail
	if response.StatusCode != http.StatusOK {
		log.Fatalf("status code error: %d %s", response.StatusCode, response.Status)
	}

	// Derive the local file name from the URL path before touching the disk.
	parsedURL, err := url.Parse(fileURL)
	if err != nil {
		log.Fatal(err)
	}
	filename := filepath.Base(parsedURL.Path)

	// Guard against URLs with no usable final path segment (e.g. a trailing
	// slash or empty path): filepath.Base yields "." or "/" there, which the
	// original would have passed straight to os.Create.
	if filename == "" || filename == "." || filename == "/" {
		log.Fatalf("cannot derive a file name from url %q", fileURL)
	}

	// if the filepath does not exist, create it
	if err := os.MkdirAll(writePath, os.ModePerm); err != nil {
		log.Fatal(err)
	}

	// create a new file to write the data to it
	pathname := filepath.Join(writePath, filename)
	file, err := os.Create(pathname)
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()

	// stream the response body straight to disk
	if _, err := io.Copy(file, response.Body); err != nil {
		log.Fatal(err)
	}
}
+func Genbank(writePath string) { + writePathDirectory := filepath.Join(writePath, "genbank") + links, err := Links("https://ftp.ncbi.nlm.nih.gov/genbank") + if err != nil { + log.Fatal(err) + } + + for _, link := range links { + parsedURL, err := url.Parse(link) + if err != nil { + log.Fatal(err) + } + + filename := filepath.Base(parsedURL.Path) + extension := filepath.Ext(filename) + + if extension == ".gz" { // if it's a gzipped file it's a genbank file so download it + fmt.Println("retrieving: " + link) + go File(link, writePathDirectory) + } + } +} diff --git a/pkg/download/links.go b/pkg/download/links.go new file mode 100644 index 0000000..27dfb5e --- /dev/null +++ b/pkg/download/links.go @@ -0,0 +1,39 @@ +package download + +import ( + "log" + "net/http" + + "github.com/PuerkitoBio/goquery" +) + +// Links returns a slice of all the links on the page at the specified url. +func Links(url string) ([]string, error) { + // get the page + response, err := http.Get(url) + if err != nil { + log.Fatal(err) + } + defer response.Body.Close() + + // if server ain't good, bail + if response.StatusCode != 200 { + log.Fatalf("status code error: %d %s", response.StatusCode, response.Status) + } + + // parse the page into a document goquery can use + doc, err := goquery.NewDocumentFromReader(response.Body) + if err != nil { + log.Fatal(err) + } + // initialize links slice to hold all the links pulled from following mapping + var links []string + doc.Find("a").Each(func(i int, selection *goquery.Selection) { // use a goquery selector to get all links on the page + // For each item found, get the link + link, _ := selection.Attr("href") + if link != "" { // if the link is not empty append it to the slice + links = append(links, link) + } + }) + return links, err +} diff --git a/pkg/download/links_test.go b/pkg/download/links_test.go new file mode 100644 index 0000000..013ba7f --- /dev/null +++ b/pkg/download/links_test.go @@ -0,0 +1,17 @@ +package download_test + +import ( + 
"testing" + + "github.com/bebop/ark/pkg/download" +) + +func TestGetPageLinks(t *testing.T) { + links, err := download.Links("http://example.com/") + if err != nil { + t.Errorf("Error: %s", err) + } + if len(links) == 0 { + t.Errorf("Error: No links found") + } +} diff --git a/pkg/download/rhea.go b/pkg/download/rhea.go new file mode 100644 index 0000000..20f6b94 --- /dev/null +++ b/pkg/download/rhea.go @@ -0,0 +1,12 @@ +package download + +// Rhea downloads all required files for ark from the Rhea database. +func Rhea(writePath string) { + go File("https://ftp.expasy.org/databases/rhea/rdf/rhea.rdf.gz", writePath) + + // get Rhea to curated uniprot mappings - relatively small. + go File("https://ftp.expasy.org/databases/rhea/tsv/rhea2uniprot_sprot.tsv", writePath) + + // get Rhea to chaotic uniprot mappings - larger than sprot but still relatively small. + go File("https://ftp.expasy.org/databases/rhea/tsv/rhea2uniprot_trembl.tsv.gz", writePath) +} diff --git a/pkg/download/tarball.go b/pkg/download/tarball.go new file mode 100644 index 0000000..f93a9e2 --- /dev/null +++ b/pkg/download/tarball.go @@ -0,0 +1,62 @@ +package download + +import ( + "archive/tar" + "compress/gzip" + "io" + "log" + "os" + "path/filepath" + "strings" +) + +// Tarball takes a gzipped tarball via Reader and extracts the first file to match fileNamePattern and then writes it to disk at writePath. +func Tarball(responseBody io.ReadCloser, fileNamePattern string, writePath string) error { + // unzip the tarball + tarball, err := gzip.NewReader(responseBody) + if err != nil { + log.Fatal(err) + } + defer tarball.Close() + // create a new tarball reader to iterate through like a directory + directory := tar.NewReader(tarball) + var filename string // will save the filename of the file we're writing + // iterate through the tarball and save the file we're looking for. 
+ for { + header, err := directory.Next() // this creates a side effect that we'll exploit outside of this loop to actually save the file + if err == io.EOF { // this is the signal that we're done if we haven't already found the file we're looking for + break + } + if err != nil { + log.Fatal(err) + } + if strings.Contains(header.Name, fileNamePattern) { // assuming that our tarball will only contain one file that will match our pattern. + filename = filepath.Base(header.Name) + break + } + } + + // if the file exists write to disk + if filename != "" { + // if the filepath does not exist, create it + err = os.MkdirAll(writePath, os.ModePerm) + if err != nil { + log.Fatal(err) + } + + // create empty file to write to + file, err := os.Create(filepath.Join(writePath, filename)) + + if err != nil { + log.Fatal(err) + } + + defer file.Close() + + // copy the compressed file to disk + if _, err := io.Copy(file, directory); err != nil { // that side effect I mentioned in the above for loop makes this possible to do out of loop. 
+ log.Fatal(err) + } + } + return err +} diff --git a/pkg/download/tarball_test.go b/pkg/download/tarball_test.go new file mode 100644 index 0000000..32bd65f --- /dev/null +++ b/pkg/download/tarball_test.go @@ -0,0 +1,29 @@ +package download_test + +import ( + "io/ioutil" + "net/http" + "os" + "testing" + + "github.com/bebop/ark/pkg/download" +) + +func TestTarball(t *testing.T) { + tmpDataDir, err := ioutil.TempDir("", "data-*") + if err != nil { + t.Errorf("Error: %s", err) + } + defer os.RemoveAll(tmpDataDir) + + response, err := http.Get("https://github.com/TimothyStiles/poly/archive/refs/tags/v0.0.0.tar.gz") + if err != nil { + t.Errorf("Error: %s", err) + } + defer response.Body.Close() + + err = download.Tarball(response.Body, "README", tmpDataDir) + if err != nil { + t.Errorf("Error: %s", err) + } +} diff --git a/pkg/env/env.go b/pkg/env/env.go new file mode 100644 index 0000000..93a1793 --- /dev/null +++ b/pkg/env/env.go @@ -0,0 +1,30 @@ +package env + +import ( + "path" + "path/filepath" + "runtime" + "strings" +) + +// RootPath returns the root directory of the project as an absolute path. +func RootPath() string { + _, runfile, _, _ := runtime.Caller(0) + dir := filepath.Join("/", path.Dir(runfile)) + + splitPath := strings.Split(dir, "/") + var lastarkIndex int + + // get last slice element equal to "ark" of splitPath + for index, substring := range splitPath { + if substring == "ark" { + lastarkIndex = index + } + } + + splitRootPath := splitPath[:lastarkIndex+1] + rootPathWithoutSlash := filepath.Join(splitRootPath...) 
// Rhea parses the gzipped Rhea RDF dump referenced by config.RheaRDF and
// inserts the parsed reactions, compounds, and reaction participants into db
// using sqlboiler's inferred column list.
//
// Returns the first error from reading, parsing, or inserting compounds /
// reaction participants. NOTE(review): errors from reaction inserts are
// currently swallowed — see the comment inside the first loop.
func Rhea(ctx context.Context, db *sqlx.DB, config config.Config) error {
	// parse Rhea file
	rheaBytes, err := rhea.ReadGzippedXML(config.RheaRDF)
	// Default (inferred) column list for all sqlboiler inserts below.
	columns := boil.Infer()
	if err != nil {
		return err
	}

	parsedRhea, err := rhea.Parse(rheaBytes)
	if err != nil {
		return err
	}

	// insert Rhea reactions into database
	for _, reaction := range parsedRhea.Reactions {
		err = reaction.Insert(ctx, db, columns)
		if err != nil {
			// NOTE(review): the early return is commented out, so reaction
			// insert errors are silently ignored — presumably to tolerate
			// duplicate rows. Confirm and document the intent, or restore
			// the return to match the two loops below.
			// return err
		}
	}

	// insert Rhea compounds into database
	for _, compound := range parsedRhea.Compounds {
		err = compound.Insert(ctx, db, columns)
		if err != nil {
			return err
		}
	}

	// insert Rhea reactionParticipants into database
	for _, reactionParticipant := range parsedRhea.ReactionParticipants {
		err = reactionParticipant.Insert(ctx, db, columns)
		if err != nil {
			return err
		}
	}

	return nil
}

// TestRhea creates a throwaway sqlite database under a temp directory using
// the dev config, then runs the Rhea inserter against it as a table-driven
// test.
func TestRhea(t *testing.T) {
	ctx := context.Background()
	tmpDataDir, err := ioutil.TempDir("", "data-*")
	tmpConfig := config.DevDefault()
	if err != nil {
		t.Errorf("Failed to create temporary data directory for TestRhea")
	}
	defer os.RemoveAll(tmpDataDir)

	tmpConfig.ArkPath = filepath.Join(tmpDataDir, "rheaTest.db")

	//create test database
	// NOTE(review): the error from CreateDatabase is never checked — err is
	// immediately reassigned by sqlx.Open below, so a failed schema setup
	// goes unnoticed until the inserts fail.
	err = schema.CreateDatabase(tmpConfig)

	db, err := sqlx.Open("sqlite", tmpConfig.ArkPath)
	if err != nil {
		// NOTE(review): t.Fatalf would be more conventional than log.Fatalf
		// inside a test.
		log.Fatalf("Failed to open sqlite in ark.db: %s", err)
	}

	type args struct {
		ctx    context.Context
		db     *sqlx.DB
		config config.Config
	}

	tests := []struct {
		name    string
		args    args
		wantErr bool
	}{
		{
			name: "TestRhea",
			args: args{
				ctx:    ctx,
				db:     db,
				config: tmpConfig,
			},
			wantErr: false,
		},
		// TODO: Add test cases.

	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if err := Rhea(tt.args.ctx, tt.args.db, tt.args.config); (err != nil) != tt.wantErr {
				t.Errorf("Rhea() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
// NewReaction builds a models.Reaction row from a parsed RDF Description and
// its Subclass node.
//
// NOTE(review): boolean-valued columns (Directional, IsChemicallyBalanced,
// IsTransport) are stored as the strings "true"/"false" via
// strconv.FormatBool because the generated models declare them as strings;
// confirm readers of these columns expect that representation.
func NewReaction(description Description, subclass Subclass) models.Reaction {
	return models.Reaction{
		ID: null.Int64From(int64(description.ID)),
		// A reaction is directional when its RDF subclass resource is the
		// DirectionalReaction type.
		Directional:          strconv.FormatBool((subclass.Resource == "http://rdf.rhea-db.org/DirectionalReaction")),
		Accession:            null.StringFrom(description.Accession),
		Status:               null.StringFrom(description.Status.Resource),
		Comment:              null.StringFrom(description.Comment),
		Equation:             null.StringFrom(description.Equation),
		HTMLEquation:         null.StringFrom(description.HTMLEquation),
		IsChemicallyBalanced: strconv.FormatBool(description.IsChemicallyBalanced),
		IsTransport:          strconv.FormatBool(description.IsTransport),
		Ec:                   null.StringFrom(description.EC.Resource),
		// NOTE(review): citations, substrates, products, and
		// substrate-or-products are no longer populated here — confirm they
		// are persisted elsewhere (e.g. via join tables) before removing the
		// commented lines for good.
		// Citations: description.CitationStrings(),
		// Substrates: description.SubstrateAccessionIDs(),
		// Products: description.ProductAccessionIDs(),
		// SubstrateOrProducts: description.SubstrateOrProductAccessionIDs(),
		Location: null.StringFrom(description.Location.Resource)}
}
// NewCompound builds a models.Compound row from a parsed RDF Description and
// its Subclass node. Small molecules and polymers get the full chemical
// fields; generic macromolecule classes get only identity fields.
func NewCompound(description Description, subclass Subclass) models.Compound {
	var newCompound models.Compound
	// Strip the "http://rdf.rhea-db.org/" prefix (23 chars) to get the bare
	// compound type name.
	compoundType := string(subclass.Resource)[23:]
	switch subclass.Resource {
	case "http://rdf.rhea-db.org/SmallMolecule", "http://rdf.rhea-db.org/Polymer":
		newCompound = models.Compound{
			ID:        int64(description.ID),
			Accession: null.StringFrom(description.About), // should be url
			// Accession: null.StringFrom(description.Accession)
			Position: null.StringFrom(description.Position),
			Name:     null.StringFrom(description.Name),
			HTMLName: null.StringFrom(description.HTMLName),
			Formula:  null.StringFrom(description.Formula),
			Charge:   null.StringFrom(description.Charge),
			Chebi:    null.StringFrom(description.ChEBI.Resource),

			// CompoundID: description.ID,
			// CompoundAccession: description.Accession,
			// CompoundName: description.Name,
			// CompoundHTMLName: description.HTMLName,
			CompoundType: compoundType}
		// Polymers reference their monomer's ChEBI via UnderlyingChEBI.
		if compoundType == "Polymer" {
			newCompound.Chebi = null.StringFrom(description.UnderlyingChEBI.Resource)
		}
		// Add subclass ChEBI
		// NOTE(review): the subclass-ChEBI handling below is commented out,
		// so SubclassOfChEBI is no longer captured — confirm intentional.
		// for _, sc := range description.Subclass {
		// 	// if strings.Contains(sc.Resource, "CHEBI") {
		// 	fmt.Println(newCompound.Chebi)
		// 	fmt.Println(sc.Resource)
		// 	// }
		// }
	case "http://rdf.rhea-db.org/GenericPolypeptide", "http://rdf.rhea-db.org/GenericPolynucleotide", "http://rdf.rhea-db.org/GenericHeteropolysaccharide":
		newCompound = models.Compound{
			Accession:    null.StringFrom(description.About),
			ID:           int64(description.ID),
			Name:         null.StringFrom(description.Name),
			HTMLName:     null.StringFrom(description.HTMLName),
			CompoundType: compoundType}
	}
	return newCompound
}

// NewReactionParticipant returns a ReactionParticipant.
//
// NOTE(review): compoundParticipantMap is no longer read anywhere in this
// function (Compound is now taken from description.Contains.Resource at the
// bottom), and the Accession assignments are commented out in every branch —
// confirm both changes are intentional before cleaning up.
func NewReactionParticipant(description Description, containsx ContainsX, compoundParticipantMap map[string]string) (models.ReactionParticipant, error) {
	// Get reaction sides
	// gzip -d -k -c rhea.rdf.gz | grep -o -P '(?<=contains).*(?= rdf)' | tr ' ' '\n' | sort -u | tr '\n' ' '
	// The exceptions to numeric contains are 2n, N, Nminus1, and Nplus1
	var newReactionParticipant models.ReactionParticipant
	switch containsx.XMLName.Local {
	case "containsN":
		newReactionParticipant = models.ReactionParticipant{
			Reactionside: description.About,
			Contains:     null.Int64From(int64(1)),
			ContainsN:    strconv.FormatBool(true),
			Minus:        strconv.FormatBool(false),
			Plus:         strconv.FormatBool(false),
			// Accession: containsx.Content
		}
	case "contains2n":
		newReactionParticipant = models.ReactionParticipant{
			Reactionside: description.About,
			Contains:     null.Int64From(int64(2)),
			ContainsN:    strconv.FormatBool(true),
			Minus:        strconv.FormatBool(false),
			Plus:         strconv.FormatBool(false),
			// Accession: containsx.Content
		}
	case "containsNminus1":
		newReactionParticipant = models.ReactionParticipant{
			Reactionside: description.About,
			Contains:     null.Int64From(int64(1)),
			ContainsN:    strconv.FormatBool(true),
			Minus:        strconv.FormatBool(true),
			Plus:         strconv.FormatBool(false),
			// Accession: containsx.Content
		}
	case "containsNplus1":
		newReactionParticipant = models.ReactionParticipant{
			Reactionside: description.About,
			Contains:     null.Int64From(int64(1)),
			ContainsN:    strconv.FormatBool(true),
			Minus:        strconv.FormatBool(false),
			Plus:         strconv.FormatBool(true),
			// Accession: containsx.Content
		}
	default:
		// Plain numeric case: the element name is "contains<count>", so the
		// count starts at byte offset 8.
		i, err := strconv.Atoi(containsx.XMLName.Local[8:])
		if err != nil {
			return models.ReactionParticipant{}, err
		}
		newReactionParticipant = models.ReactionParticipant{

			Reactionside: description.About,
			Contains:     null.Int64From(int64(i)),
			ContainsN:    strconv.FormatBool(false),
			Minus:        strconv.FormatBool(false),
			Plus:         strconv.FormatBool(false),
			// Accession: containsx.Content
		}
	}
	newReactionParticipant.Compound = null.StringFrom(description.Contains.Resource)
	return newReactionParticipant, nil
}
+func ReadGzippedXML(gzipPath string) ([]byte, error) { // Get gz'd file bytes xmlFile, err := os.Open(gzipPath) if err != nil { @@ -339,7 +350,7 @@ func ReadGzippedXml(gzipPath string) ([]byte, error) { func Read(path string) (Rhea, error) { // Read the Compressed Rhea XML to bytes var rhea Rhea - rheaBytes, err := ReadGzippedXml(path) + rheaBytes, err := ReadGzippedXML(path) if err != nil { return rhea, err } diff --git a/pkg/rhea/rhea_test.go b/pkg/rhea/rhea_test.go index 7c5010d..c4dcf4d 100644 --- a/pkg/rhea/rhea_test.go +++ b/pkg/rhea/rhea_test.go @@ -6,14 +6,14 @@ import ( "os" "testing" + "github.com/bebop/ark/pkg/config" _ "github.com/mattn/go-sqlite3" ) -var rhea Rhea - func TestMain(m *testing.M) { + testConfig := config.TestDefault() var err error - rhea, err = Read("data/rhea_mini.rdf.gz") + _, err = Read(testConfig.RheaRDF) if err != nil { log.Fatalf("Failed to read rhea: %v", err) } @@ -25,15 +25,18 @@ func TestMain(m *testing.M) { func ExampleRhea_ExportJSON() { // Convert rhea to JSON - rheaJson, _ := rhea.ExportJSON() + testConfig := config.TestDefault() + rhea, _ := Read(testConfig.RheaRDF) + rheaJSON, _ := rhea.ExportJSON() - fmt.Println(string(rheaJson)[:100]) - // Output: {"reactionParticipants":[{"reactionside":"http://rdf.rhea-db.org/10000_L","contains":1,"containsn":f + fmt.Println(string(rheaJSON)[:100]) + // Output: {"reactionParticipants":[{"compound":"http://rdf.rhea-db.org/Participant_10000_compound_1283","react } func TestReadRheaToUniprot(t *testing.T) { + testConfig := config.TestDefault() lines := make(chan RheaToUniprot, 100) - go ReadRheaToUniprotTrembl("data/rhea2uniprot_sprot_minimized.tsv.gz", lines) + go ReadRheaToUniprotTrembl(testConfig.RheaToUniprotSprot, lines) var line RheaToUniprot for l := range lines { @@ -44,16 +47,3 @@ func TestReadRheaToUniprot(t *testing.T) { log.Fatalf("Got wrong uniprotId. 
Expected P06106, got %s", line.UniprotID) } } - -func ExampleReadRheaToUniprotSprot() { - lines := make(chan RheaToUniprot, 100) - go ReadRheaToUniprotSprot("data/rhea2uniprot_sprot_minimized.tsv", lines) - - var line RheaToUniprot - for l := range lines { - line = l - } - - fmt.Println(line) - // Output: {10048 UN 10048 P06106} -} diff --git a/schema/schema.go b/schema/schema.go new file mode 100644 index 0000000..05a4bc3 --- /dev/null +++ b/schema/schema.go @@ -0,0 +1,246 @@ +package schema + +import ( + "io/ioutil" + "log" + "strings" + + "github.com/bebop/ark/pkg/config" + "github.com/huandu/go-sqlbuilder" + "github.com/jmoiron/sqlx" + + _ "modernc.org/sqlite" +) + +// CreateDatabase creates a new database with the given name. +func CreateDatabase(config config.Config) error { + + // Begin SQLite + log.Println("Creating database...") + db, err := sqlx.Open("sqlite", config.ArkPath) + + if err != nil { + log.Fatalf("Failed to open sqlite database: %s", err) + } + + defer db.Close() + + // Execute our schema in memory + _, err = db.Exec(CreateSchema()) + if err != nil { + log.Fatalf("Failed to execute schema: %s", err) + } + + schemaStringBytes, err := ioutil.ReadFile(config.ChemblSchema) + if err != nil { + log.Fatalf("Failed to open chembl schema: %s", err) + } + + _, err = db.Exec(string(schemaStringBytes)) + if err != nil { + log.Fatalf("Failed to execute schema: %s", err) + } + + err = chemblAttach(db, config.ArkPath) + if err != nil { + log.Fatalf("Failed to attach chembl with error %s", err) + } + + return err +} + +// CreateSchema generates the SQL for the database schema minus attachments which are handled in createDatabase. +func CreateSchema() string { + + // Note: + // Some variables have wonky capitalizations. + // This is because the SQLite driver is case-sensitive + // and we want to be consistent with the rest of the function. + + // Frequenty used strings in schema definition defined here + // for convenience and to avoid typos. 
+ const ( + TEXT = "TEXT" + INTEGER = "INT" + BOOL = "BOOL" + NOTNULL = "NOT NULL" + PRIMARYKEY = "PRIMARY KEY" + DEFAULTFALSE = "DEFAULT FALSE" + DEFAULTTRUE = "DEFAULT TRUE" + SEQHASH = "seqhash" + ACCESSION = "accession" + GENBANK = "genbank" + REFERENCESEQHASH = "REFERENCES seqhash(seqhash)" + REFERENCECHEBIACCESSION = "REFERENCES chebi(accession)" + REFERENCECOMPOUNDACCESSION = "REFERENCES compound(accession)" + REFERENCEREACTIONACCESSION = "REFERENCES reaction(accession)" + REFERENCEREACTIONSIDEACCESSION = "REFERENCES reactionside(accession)" + ID = "id" + NAME = "name" + COMPOUND = "compound" + CHEBI = "chebi" + HTMLNAME = "html_name" + REACTION = "reaction" + REACTIONSIDE = "reactionside" + UNIPROT = "uniprot" + arkDOT = "" + ark = "ark" + ) + + // each built string will be appended to this slice and returned at the end of the function + var tableStringSlice []string + + // // create the ark database itself + // databaseDeclaration := "CREATE DATABASE " + ark + // tableStringSlice = append(tableStringSlice, databaseDeclaration) + + // create seqhash table + seqhash := sqlbuilder.NewCreateTableBuilder() + seqhash.CreateTable(SEQHASH).IfNotExists() + seqhash.Define(SEQHASH, TEXT, NOTNULL, PRIMARYKEY) + seqhash.Define("sequence", TEXT, NOTNULL) + seqhash.Define("circular", BOOL, NOTNULL, DEFAULTFALSE) + seqhash.Define("doublestranded", BOOL, NOTNULL, DEFAULTTRUE) + seqhash.Define("seqhashtype", TEXT, NOTNULL, "CHECK (seqhashtype IN ('DNA', 'RNA', 'PROTEIN'))") + seqhash.Define("translation", TEXT, REFERENCESEQHASH) + seqhashTableString, _ := seqhash.Build() + tableStringSlice = append(tableStringSlice, seqhashTableString) + // "CREATE TABLE IF NOT EXISTS seqhash (seqhash TEXT NOT NULL PRIMARY KEY, sequence TEXT NOT NULL, circular INTEGER NOT NULL DEFAULT FALSE, doublestranded INTEGER NOT NULL DEFAULT TRUE, seqhashtype TEXT NOT NULL CHECK (seqhashtype IN ('DNA', 'RNA', 'PROTEIN')), translations TEXT REFERENCES seqhash(seqhash))" + + // create genbank table + 
genbank := sqlbuilder.NewCreateTableBuilder() + genbank.CreateTable(GENBANK).IfNotExists() + genbank.Define(ACCESSION, TEXT, PRIMARYKEY) + genbank.Define(SEQHASH, TEXT, NOTNULL, REFERENCESEQHASH) + genbankTableString, _ := genbank.Build() + tableStringSlice = append(tableStringSlice, genbankTableString) + + // create genbank features table + genbankFeatures := sqlbuilder.NewCreateTableBuilder() + genbankFeatures.CreateTable("genbank_features").IfNotExists() + genbankFeatures.Define(SEQHASH, TEXT, NOTNULL, REFERENCESEQHASH) + genbankFeatures.Define("parent", TEXT, NOTNULL, "REFERENCES genbank(accession)") + genbankFeatures.Define("PRIMARY KEY(", SEQHASH, ", ", "parent", ")") + genbankFeaturesTableString, _ := genbankFeatures.Build() + tableStringSlice = append(tableStringSlice, genbankFeaturesTableString) + + // create uniprot table + uniprot := sqlbuilder.NewCreateTableBuilder() + uniprot.CreateTable(UNIPROT).IfNotExists() + uniprot.Define(ACCESSION, TEXT, PRIMARYKEY) + uniprot.Define("database", TEXT, NOTNULL) + uniprot.Define(SEQHASH, TEXT, NOTNULL, REFERENCESEQHASH) + uniprotTableString, _ := uniprot.Build() + tableStringSlice = append(tableStringSlice, uniprotTableString) + + //*** create rhea tables ***// + + // create chebi table + chebi := sqlbuilder.NewCreateTableBuilder() + chebi.CreateTable(CHEBI).IfNotExists() + chebi.Define(ACCESSION, TEXT, PRIMARYKEY) + chebi.Define("subclass_of", TEXT, REFERENCECHEBIACCESSION) + chebiTableString, _ := chebi.Build() + tableStringSlice = append(tableStringSlice, chebiTableString) + + // create compound table + compound := sqlbuilder.NewCreateTableBuilder() + compound.CreateTable(COMPOUND).IfNotExists() + compound.Define(ID, INTEGER, NOTNULL) + compound.Define(ACCESSION, TEXT, PRIMARYKEY) + compound.Define("position", TEXT) + compound.Define(NAME, TEXT) + compound.Define(HTMLNAME, TEXT) + compound.Define("formula", TEXT) + compound.Define("charge", TEXT) + compound.Define(CHEBI, TEXT, REFERENCECHEBIACCESSION) + 
compound.Define("polymerization_index", TEXT) + compound.Define("compound_type", TEXT, NOTNULL, "CHECK(compound_type IN ('SmallMolecule', 'Polymer', 'GenericPolypeptide', 'GenericPolynucleotide', 'GenericHeteropolysaccharide'))") + compoundTableString, _ := compound.Build() + tableStringSlice = append(tableStringSlice, compoundTableString) + + // create reactivePart table + reactivePart := sqlbuilder.NewCreateTableBuilder() + reactivePart.CreateTable("reactive_part").IfNotExists() + reactivePart.Define(ID, INTEGER) + reactivePart.Define(ACCESSION, TEXT, PRIMARYKEY) + reactivePart.Define(NAME, TEXT) + reactivePart.Define(HTMLNAME, TEXT) + reactivePart.Define(COMPOUND, TEXT, NOTNULL, REFERENCECOMPOUNDACCESSION) + reactivePartTableString, _ := reactivePart.Build() + tableStringSlice = append(tableStringSlice, reactivePartTableString) + + // create reaction table + reaction := sqlbuilder.NewCreateTableBuilder() + reaction.CreateTable(REACTION).IfNotExists() + reaction.Define(ID, INTEGER) + reaction.Define("directional", BOOL, NOTNULL, DEFAULTFALSE) + reaction.Define(ACCESSION, TEXT, PRIMARYKEY) + reaction.Define("status", TEXT) + reaction.Define("comment", TEXT) + reaction.Define("equation", TEXT) + reaction.Define("html_equation", TEXT) + reaction.Define("is_chemically_balanced", BOOL, NOTNULL, DEFAULTTRUE) + reaction.Define("is_transport", BOOL, NOTNULL, DEFAULTFALSE) + reaction.Define("ec", TEXT) + reaction.Define("location", TEXT) + reactionTableString, _ := reaction.Build() + tableStringSlice = append(tableStringSlice, reactionTableString) + + // create reactionside table + reactionside := sqlbuilder.NewCreateTableBuilder() + reactionside.CreateTable(REACTIONSIDE).IfNotExists() + reactionside.Define(ACCESSION, TEXT, PRIMARYKEY) + reactionsideTableString, _ := reactionside.Build() + tableStringSlice = append(tableStringSlice, reactionsideTableString) + + // create reactionside_reaction table + reactionsideReaction := sqlbuilder.NewCreateTableBuilder() + 
reactionsideReaction.CreateTable("reactionside_reaction").IfNotExists() + reactionsideReaction.Define(REACTION, TEXT, NOTNULL, REFERENCEREACTIONACCESSION) + reactionsideReaction.Define(REACTIONSIDE, TEXT, NOTNULL, REFERENCEREACTIONSIDEACCESSION) + reactionsideReaction.Define("reactionside_reaction_type", TEXT, NOTNULL, "CHECK(reactionside_reaction_type IN ('substrate_or_product', 'substrate', 'product'))") + reactionsideReaction.Define("PRIMARY KEY(", REACTION, ", ", REACTIONSIDE, ")") + reactionsideReactionTableString, _ := reactionsideReaction.Build() + tableStringSlice = append(tableStringSlice, reactionsideReactionTableString) + + // create reactionParticipant table + reactionParticipant := sqlbuilder.NewCreateTableBuilder() + reactionParticipant.CreateTable("reaction_participant").IfNotExists() + reactionParticipant.Define(COMPOUND, TEXT, REFERENCECOMPOUNDACCESSION) + reactionParticipant.Define(REACTIONSIDE, TEXT, NOTNULL, REFERENCEREACTIONSIDEACCESSION) + reactionParticipant.Define("contains", INTEGER) + reactionParticipant.Define("contains_n", BOOL, NOTNULL, DEFAULTFALSE) + reactionParticipant.Define("minus", BOOL, NOTNULL, DEFAULTFALSE) + reactionParticipant.Define("plus", BOOL, NOTNULL, DEFAULTFALSE) + reactionParticipant.Define("PRIMARY KEY(", COMPOUND, ", ", REACTIONSIDE, ")") + reactionparticipantTableString, _ := reactionParticipant.Build() + tableStringSlice = append(tableStringSlice, reactionparticipantTableString) + + // create uniprot_to_reaction table + uniprotToReaction := sqlbuilder.NewCreateTableBuilder() + uniprotToReaction.CreateTable("uniprot_to_reaction").IfNotExists() + uniprotToReaction.Define(REACTION, TEXT, REFERENCEREACTIONACCESSION) + uniprotToReaction.Define(UNIPROT, TEXT, "REFERENCES uniprot(accession)") + uniprotToReaction.Define("PRIMARY KEY(", REACTION, ", ", UNIPROT, ")") + uniprotToReactionTableString, _ := uniprotToReaction.Build() + tableStringSlice = append(tableStringSlice, uniprotToReactionTableString) + + schema := 
strings.Join(tableStringSlice, ";\n\n") + + return schema +} + +/****************************************************************************** + +Chembl + +******************************************************************************/ + +func chemblAttach(db *sqlx.DB, chembl string) error { + _, err := db.Exec("ATTACH DATABASE ? AS chembl", chembl) + if err != nil { + return err + } + return nil +} diff --git a/schema/schema_test.go b/schema/schema_test.go new file mode 100644 index 0000000..5e1a701 --- /dev/null +++ b/schema/schema_test.go @@ -0,0 +1,158 @@ +package schema + +import ( + //"context" + //"fmt" + + "io/ioutil" + "log" + + //"net/http" + "os" + "path/filepath" + "testing" + + "github.com/bebop/ark/pkg/config" + "github.com/jmoiron/sqlx" + + //"github.com/minio/minio-go/v7" + //"github.com/minio/minio-go/v7/pkg/credentials" + //"github.com/ory/dockertest/v3" + //dc "github.com/ory/dockertest/v3/docker" + + _ "github.com/mattn/go-sqlite3" +) + +var db *sqlx.DB + +//var minioClient *minio.Client + +func TestCreateDatabase(t *testing.T) { + tmpDataDir, err := ioutil.TempDir("", "data-*") + tmparkConfig := config.DevDefault() + tmparkConfig.ArkPath = filepath.Join(tmpDataDir, "test.db") + if err != nil { + t.Errorf("Failed to create temporary data directory") + } + defer os.RemoveAll(tmpDataDir) + + err = CreateDatabase(tmparkConfig) + + if err != nil { + log.Fatalf("Failed on error during database creation: %s", err) + } +} + +func TestMain(m *testing.M) { + var err error + //pool, err := dockertest.NewPool("") + //if err != nil { + // log.Fatalf("Could not connect to docker: %s", err) + //} + + //options := &dockertest.RunOptions{ + // Repository: "minio/minio", + // Tag: "latest", + // Cmd: []string{"server", "/data"}, + // PortBindings: map[dc.Port][]dc.PortBinding{"9000/tcp": {{HostPort: "9000"}}}, + // Env: []string{"MINIO_ACCESS_KEY=MYACCESSKEY", "MINIO_SECRET_KEY=MYSECRETKEY"}, + //} + + //resource, err := pool.RunWithOptions(options) + //if err 
!= nil { + // log.Fatalf("Could not start resource: %s", err) + //} + + //endpoint := fmt.Sprintf("localhost:%s", resource.GetPort("9000/tcp")) + //// or you could use the following, because we mapped the port 9000 to the port 9000 on the host + //// endpoint := "localhost:9000" + + //// exponential backoff-retry, because the application in the container might not be ready to accept connections yet + //// the minio client does not do service discovery for you (i.e. it does not check if connection can be established), so we have to use the health check + //if err := pool.Retry(func() error { + // url := fmt.Sprintf("http://%s/minio/health/live", endpoint) + // resp, err := http.Get(url) + // if err != nil { + // return err + // } + // if resp.StatusCode != http.StatusOK { + // return fmt.Errorf("status code not OK") + // } + // resp.Body.Close() + // return nil + //}); err != nil { + // log.Fatalf("Could not connect to docker: %s", err) + //} + + //// now we can instantiate minio client + //minioClient, err = minio.New(endpoint, &minio.Options{ + // Creds: credentials.NewStaticV4("MYACCESSKEY", "MYSECRETKEY", ""), + // Secure: false, + //}) + //if err != nil { + // log.Println("Failed to create minio client:", err) + //} + + //// now we can use the client, for example, to list the buckets + //_, err = minioClient.ListBuckets(context.Background()) + //if err != nil { + // log.Fatalf("error while listing buckets: %v", err) + //} + + // Begin SQLite + db, err = sqlx.Open("sqlite3", ":memory:") + if err != nil { + log.Fatalf("Failed to open sqlite in memory: %s", err) + } + + // Execute our schema in memory + _, err = db.Exec(CreateSchema()) + if err != nil { + log.Fatalf("Failed to execute schema: %s", err) + } + + // Run the rest of our tests + code := m.Run() + + //// You can't defer this because os.Exit doesn't care for defer + //if err := pool.Purge(resource); err != nil { + // log.Fatalf("Could not purge resource: %s", err) + //} + + os.Exit(code) +} + +func 
TestChemblAttach(t *testing.T) { + tmpDataDir, err := ioutil.TempDir("", "data-*") + testConfig := config.DevDefault() + + if err != nil { + t.Errorf("Failed to create temporary data directory") + } + defer os.RemoveAll(tmpDataDir) + + tmpChemblDBPath := filepath.Join(tmpDataDir, "chembl.db") + + // Read Chembl schema + schemaStringBytes, err := ioutil.ReadFile(testConfig.ChemblSchema) + if err != nil { + t.Errorf("Failed to open chembl schema: %s", err) + } + + // Begin SQLite + chemblDB, err := sqlx.Open("sqlite3", tmpChemblDBPath) + if err != nil { + t.Errorf("Failed to open sqlite in %s: %s", tmpChemblDBPath, err) + } + + // Execute our schema in memory + _, err = chemblDB.Exec(string(schemaStringBytes)) + if err != nil { + t.Errorf("Failed to execute schema: %s", err) + } + + err = chemblAttach(db, tmpChemblDBPath) + if err != nil { + t.Errorf("Failed to attach chembl with error %s", err) + } +} diff --git a/sqlboiler.toml b/sqlboiler.toml new file mode 100644 index 0000000..4286bad --- /dev/null +++ b/sqlboiler.toml @@ -0,0 +1,5 @@ +# Absolute path is recommended since the location +# sqlite3 is being run can change. +# For example generation time and model test time. +[sqlite3] +dbname = "/Users/timothystiles/git/ark/data/ark.db" \ No newline at end of file