Commit 3d4f693

feat: remove dataset selection from upload phase and set it as other fieldmapping

edelclaux committed Dec 12, 2024
1 parent ce364f3 commit 3d4f693

Showing 20 changed files with 27 additions and 172 deletions.
backend/geonature/core/imports/checks/dataframe/core.py (4 changes: 2 additions & 2 deletions)

@@ -230,7 +230,7 @@ def check_datasets(
     uuid = df.loc[has_uuid_mask, uuid_col].unique().tolist()

     datasets = {
-        ds.unique_dataset_id.hex: ds
+        str(ds.unique_dataset_id): ds
         for ds in TDatasets.query.filter(TDatasets.unique_dataset_id.in_(uuid))
         .options(sa.orm.joinedload(TDatasets.nomenclature_data_origin))
         .options(sa.orm.raiseload("*"))
@@ -256,7 +256,7 @@ def check_datasets(

     # Warning: we check only permissions of first author, but currently there it only one author per import.
     authorized_datasets = {
-        ds.unique_dataset_id.hex: ds
+        str(ds.unique_dataset_id): ds
         for ds in db.session.execute(
             TDatasets.filter_by_creatable(
                 user=imprt.authors[0], module_code=module_code, object_code=object_code
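
Aside, not part of the commit: the key change above matters because UUID.hex drops the dashes while str() keeps the canonical dashed form, which is presumably how dataset UUIDs appear in the uploaded file's uuid_col values. A short standard-library illustration:

# Illustration only (Python standard library), not code from this commit.
from uuid import UUID

u = UUID("9f86d081-8292-4f6d-8f94-9d5c8c2dcb6e")
print(u.hex)   # 9f86d08182924f6d8f949d5c8c2dcb6e      -> no dashes, would not match the file values
print(str(u))  # 9f86d081-8292-4f6d-8f94-9d5c8c2dcb6e  -> dashed form now used as dict key
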
backend/geonature/core/imports/config_schema.py (6 changes: 0 additions & 6 deletions)

@@ -18,12 +18,6 @@
         "show": True,
         "filter": True,
     },
-    {
-        "prop": "dataset.dataset_name",
-        "name": "Jeu de données",
-        "show": True,
-        "filter": False,
-    },
     {
         "prop": "statistics_rows",
         "name": "Lignes importées",
backend/geonature/core/imports/models.py (5 changes: 0 additions & 5 deletions)

@@ -29,7 +29,6 @@
 from geonature.utils.celery import celery_app
 from geonature.core.gn_permissions.tools import get_scopes_by_action
 from geonature.core.gn_commons.models import TModules
-from geonature.core.gn_meta.models import TDatasets
 from pypnnomenclature.models import BibNomenclaturesTypes
 from pypnusershub.db.models import User

@@ -316,8 +315,6 @@ def get_instance_permissions(self, scopes, user=None):
 @serializable(
     fields=[
         "authors.nom_complet",
-        "dataset.dataset_name",
-        "dataset.active",
         "destination.code",
         "destination.label",
         "destination.statistics_labels",
@@ -349,7 +346,6 @@ class TImports(InstancePermissionMixin, db.Model):
     detected_encoding = db.Column(db.Unicode, nullable=True)
     # import_table = db.Column(db.Unicode, nullable=True)
     full_file_name = db.Column(db.Unicode, nullable=True)
-    id_dataset = db.Column(db.Integer, ForeignKey("gn_meta.t_datasets.id_dataset"), nullable=True)
     date_create_import = db.Column(db.DateTime, default=datetime.now)
     date_update_import = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now)
     date_end_import = db.Column(db.DateTime, nullable=True)
@@ -369,7 +365,6 @@ class TImports(InstancePermissionMixin, db.Model):
     )
     loaded = db.Column(db.Boolean, nullable=False, default=False)
     processed = db.Column(db.Boolean, nullable=False, default=False)
-    dataset = db.relationship(TDatasets, lazy="joined")
     source_file = deferred(db.Column(db.LargeBinary))
     columns = db.Column(ARRAY(db.Unicode))
     # keys are target names, values are source names
backend/geonature/core/imports/routes/imports.py (42 changes: 1 addition & 41 deletions)

@@ -23,7 +23,6 @@
 from geonature.core.gn_permissions import decorators as permissions
 from geonature.core.gn_permissions.decorators import login_required
 from geonature.core.gn_permissions.tools import get_scopes_by_action
-from geonature.core.gn_meta.models import TDatasets

 from pypnnomenclature.models import TNomenclatures

@@ -85,11 +84,6 @@ def get_import_list(scope, destination=None):
     filters = []
     if search:
         filters.append(TImports.full_file_name.ilike(f"%{search}%"))
-        filters.append(
-            TImports.dataset.has(
-                func.lower(TDatasets.dataset_name).contains(func.lower(search)),
-            )
-        )
         filters.append(
             TImports.authors.any(
                 or_(
@@ -114,11 +108,9 @@
     query = (
         select(TImports)
         .options(
-            contains_eager(TImports.dataset),
             contains_eager(TImports.authors),
             contains_eager(TImports.destination).contains_eager(Destination.module),
         )
-        .join(TImports.dataset, isouter=True)
         .join(TImports.authors, isouter=True)
         .join(Destination)
         .join(TModules)
@@ -165,13 +157,10 @@ def upload_file(scope, imprt, destination=None):  # destination is set when impr
     Add an import or update an existing import.
     :form file: file to import
-    :form int datasetId: dataset ID to which import data
     """
     if imprt:
         if not imprt.has_instance_permission(scope, action_code="C"):
             raise Forbidden
-        if not imprt.dataset.active:
-            raise Forbidden("Le jeu de données est fermé.")
         destination = imprt.destination
     else:
         assert destination
@@ -187,22 +176,7 @@ def upload_file(scope, imprt, destination=None):  # destination is set when impr
     if size == 0:
         raise BadRequest(description="Impossible to upload empty files")
     if imprt is None:
-        try:
-            dataset_id = int(request.form["datasetId"])
-        except ValueError:
-            raise BadRequest(description="'datasetId' must be an integer.")
-        dataset = db.session.get(TDatasets, dataset_id)
-        if dataset is None:
-            raise BadRequest(description=f"Dataset '{dataset_id}' does not exist.")
-        ds_scope = get_scopes_by_action(
-            module_code=destination.module.module_code,
-            object_code="ALL",  # TODO object_code should be configurable by destination
-        )["C"]
-        if not dataset.has_instance_permission(ds_scope):
-            raise Forbidden(description="Vous n’avez pas les permissions sur ce jeu de données.")
-        if not dataset.active:
-            raise Forbidden("Le jeu de données est fermé.")
-        imprt = TImports(destination=destination, dataset=dataset)
+        imprt = TImports(destination=destination)
         imprt.authors.append(author)
         db.session.add(imprt)
     else:
@@ -226,8 +200,6 @@ def upload_file(scope, imprt, destination=None):  # destination is set when impr
 def decode_file(scope, imprt):
     if not imprt.has_instance_permission(scope, action_code="C"):
         raise Forbidden
-    if not imprt.dataset.active:
-        raise Forbidden("Le jeu de données est fermé.")
     if imprt.source_file is None:
         raise BadRequest(description="A file must be first uploaded.")
     if "encoding" not in request.json:
@@ -292,8 +264,6 @@ def decode_file(scope, imprt):
 def set_import_field_mapping(scope, imprt):
     if not imprt.has_instance_permission(scope, action_code="C"):
         raise Forbidden
-    if not imprt.dataset.active:
-        raise Forbidden("Le jeu de données est fermé.")
     try:
         FieldMapping.validate_values(request.json)
     except ValueError as e:
@@ -309,8 +279,6 @@ def set_import_field_mapping(scope, imprt):
 def load_import(scope, imprt):
     if not imprt.has_instance_permission(scope, action_code="C"):
         raise Forbidden
-    if not imprt.dataset.active:
-        raise Forbidden("Le jeu de données est fermé.")
     if imprt.source_file is None:
         raise BadRequest(description="A file must be first uploaded.")
     if imprt.fieldmapping is None:
@@ -404,8 +372,6 @@ def get_import_values(scope, imprt):
 def set_import_content_mapping(scope, imprt):
     if not imprt.has_instance_permission(scope, action_code="C"):
         raise Forbidden
-    if not imprt.dataset.active:
-        raise Forbidden("Le jeu de données est fermé.")
     try:
         ContentMapping.validate_values(request.json)
     except ValueError as e:
@@ -424,8 +390,6 @@ def prepare_import(scope, imprt):
     """
     if not imprt.has_instance_permission(scope, action_code="C"):
         raise Forbidden
-    if not imprt.dataset.active:
-        raise Forbidden("Le jeu de données est fermé.")

     # Check preconditions to execute this action
     if not imprt.loaded:
@@ -627,8 +591,6 @@ def import_valid_data(scope, imprt):
     """
     if not imprt.has_instance_permission(scope, action_code="C"):
         raise Forbidden
-    if not imprt.dataset.active:
-        raise Forbidden("Le jeu de données est fermé.")
     if not imprt.processed:
         raise Forbidden("L’import n’a pas été préalablement vérifié.")
     transient_table = imprt.destination.get_transient_table()
@@ -662,8 +624,6 @@ def delete_import(scope, imprt):
     """
     if not imprt.has_instance_permission(scope, action_code="C"):
         raise Forbidden
-    if not imprt.dataset.active:
-        raise Forbidden("Le jeu de données est fermé.")
     ImportUserError.query.filter_by(imprt=imprt).delete()
     transient_table = imprt.destination.get_transient_table()
     db.session.execute(
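
A hedged sketch of the resulting client-side change: the multipart upload now only needs the file part documented in the docstring above, since upload_file() no longer reads a datasetId form field; the dataset is chosen later through the field mapping. The URL below is a placeholder assumption, as the actual route path is not shown in this diff.

# Sketch only, using the requests library; the endpoint path is a placeholder assumption.
import requests

UPLOAD_URL = "https://geonature.example.org/<import-routes>/upload"  # placeholder

with open("observations.csv", "rb") as source:
    # Before this commit the form also had to carry the dataset, e.g.
    # data={"datasetId": "42"}; that field is no longer read by the backend.
    response = requests.post(UPLOAD_URL, files={"file": ("observations.csv", source)})
response.raise_for_status()
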
[file path not shown]

@@ -47,9 +47,6 @@ <h4 style="color: #666">
   </li>

   {% endfor %}
-  {% if data.dataset: %}
-  <li><b>Jeu de données</b> : {{ data.dataset.dataset_name }}</li>
-  {% endif %}
 </ul>
 {% if data.keywords: %}
 <div class="information">
@@ -74,7 +71,7 @@ <h4 style="color: #666">
   </div>
 {% endif %}


 {% if data.date_end_import is not none %}
 <div class="row card">
   <div class="card-header">
@@ -97,7 +94,7 @@ <h4 style="color: #666">
   {% endfor %}
   </tbody>
 </table>

 <div class="charts">
   <img
     class="charts__img"
@@ -106,7 +103,7 @@ <h4 style="color: #666">
 </div>

 </div>

 </div>
 {% endif %}
 {% if data.processed %}
@@ -137,7 +134,7 @@ <h4 style="color: #666">
   </table>
 </div>
 </div>

 {% endif %} <!-- Endif import.processed-->

[file path not shown]

@@ -14,7 +14,7 @@ <h4 class="modal-title pull-left">Suppression</h4>
 </div>
 <div class="modal-body">
   <p>
-    Supprimer cet import associé au JDD "{{ row.dataset.dataset_name }}" et commencé le
+    Supprimer cet import commencé le
     {{ row.date_create_import }}?
   </p>
   <p *ngIf="row.date_end_import">Attention : cela supprimera aussi les données importées.</p>
[file path not shown]

@@ -11,10 +11,6 @@ <h3 class="underlined">Description de l'import</h3>
   <b>Nom du fichier :</b>
   {{ importData.full_file_name }}
 </p>
-<p>
-  <b>Jeu de données :</b>
-  {{ importData.dataset.dataset_name }}
-</p>
 <p>
   <b>Date de soumission de l'import :</b>
   {{ importData.date_create_import | date: 'dd/MM/yyyy' }}
[file path not shown]

@@ -89,14 +89,6 @@ <h5 class="card-title">Liste des imports</h5>
   "
 >
   <ng-container [ngSwitch]="col.prop">
-    <ng-container *ngSwitchCase="'dataset.dataset_name'">
-      <a
-        routerLink="/metadata/dataset_detail/{{ row.id_dataset }}"
-        matTooltip="Voir la fiche du jeu de données"
-      >
-        {{ row.dataset ? row.dataset.dataset_name : '' }}
-      </a>
-    </ng-container>
     <ng-container *ngSwitchCase="'date_create_import'">
       {{ row.date_create_import | date: 'dd-MM-yyyy' }}
     </ng-container>
@@ -202,7 +194,7 @@ <h5 class="card-title">Liste des imports</h5>
   ngx-datatable-cell-template
 >
   <button
-    [disabled]="row.processing || !row?.cruved?.U || !row?.dataset?.active"
+    [disabled]="row.processing || !row?.cruved?.U"
     [matTooltip]="getTooltip(row, 'edit')"
     mat-icon-button
     color="primary"
@@ -237,7 +229,7 @@ <h5 class="card-title">Liste des imports</h5>
   ></i>
 </button>
 <button
-  [disabled]="!row?.cruved?.D || !row?.dataset?.active"
+  [disabled]="!row?.cruved?.D"
   mat-icon-button
   color="warn"
   class="Datatable__button"
[file path not shown]

@@ -155,10 +155,6 @@ export class ImportListComponent implements OnInit {
     this.importProcessService.continueProcess(data);
   }

-  onViewDataset(row: Import) {
-    this._router.navigate([`metadata/dataset_detail/${row.id_dataset}`]);
-  }
-
   downloadSourceFile(row: Import) {
     this._ds.setDestination(row.destination.code);
     this._ds.downloadSourceFile(row.id_import).subscribe((result) => {
@@ -204,8 +200,6 @@ export class ImportListComponent implements OnInit {
   getTooltip(row, tooltipType) {
     if (!row?.cruved?.U) {
       return "Vous n'avez pas les droits";
-    } else if (!row?.dataset?.active) {
-      return 'JDD clos';
     } else if (tooltipType === 'edit') {
       return "Modifier l'import";
     } else {
[file path not shown]

@@ -2,13 +2,6 @@
 <h5 class="HeaderStepper">
   <span id="destination">{{ infoBox.destinationName }}</span>
 </h5>
-<h5
-  *ngIf="infoBox.destinationDatasetName"
-  class="HeaderStepper"
->
-  <span>{{ 'Import.DestinatinationDatasetLabel' | translate }}:</span>
-  <span id="dataset">{{ infoBox.destinationDatasetName }}</span>
-</h5>

 <h5
   *ngIf="infoBox.fileName"
[file path not shown]

@@ -7,11 +7,6 @@
   padding-left: 0.2em;
   font-size: 1.5rem;
 }
-#dataset {
-  padding-left: 0.2em;
-  color: var(--primary);
-  font-weight: 700;
-}
 #filename {
   padding-left: 0.2em;
   color: var(--secondary);
[file path not shown]

@@ -5,7 +5,6 @@ import { ImportProcessService } from '../import-process.service';

 interface ImportInfoBoxData {
   destinationName: string;
-  destinationDatasetName?: string;
   fileName: string;
 }

@@ -23,42 +22,39 @@ export class HeaderStepperComponent implements OnInit, OnChanges {
   ) {}

   ngOnChanges(changes: SimpleChanges): void {
-    this.updateDestinationDataset();
+    this.updateImportInfo();
   }

   ngOnInit() {
     this._route.params.subscribe((params) => {
       this._importProcessService.importDataUpdated.subscribe(() => {
-        this.updateDestinationDataset();
+        this.updateImportInfo();
       });
-      this.updateDestinationDataset(params);
+      this.updateImportInfo(params);
     });
   }

   /**
-   * Updates the destination dataset based on the import data.
+   * Updates import infos.
    *
-   * This function retrieves the import data from the import process service and uses it to fetch the dataset from the import data service.
-   * If a dataset is found, its name is assigned to the destinationDataset property.
+   * This function retrieves the import data from the import process service and uses it to update the info.
    *
    * @return {void}
    */
-  updateDestinationDataset(params?: any) {
+  updateImportInfo(params?: any) {
     const importData = this._importProcessService.getImportData();
     if (!importData && params) {
       this._importDataService.getDestination(params['destination']).subscribe((dest) => {
         this.infoBox = {
-          destinationDatasetName: undefined,
           destinationName: dest.label,
           fileName: undefined,
         };
       });
       return;
     }
     if (!importData) return;
-    const { dataset, destination, full_file_name } = importData;
+    const { destination, full_file_name } = importData;
     this.infoBox = {
-      destinationDatasetName: dataset?.dataset_name,
       destinationName: destination.label,
       fileName: full_file_name,
     };
(remaining changed files not loaded)
