diff --git a/.gitignore b/.gitignore index 9d63e9a6..a978cd5f 100644 --- a/.gitignore +++ b/.gitignore @@ -223,4 +223,6 @@ cython_debug/ # IntelliJ like IDEs (e.g. PyCharm) .idea -tmp* \ No newline at end of file +tmp* +container/nease_events + diff --git a/container/domain/Process/nease_output.py b/container/domain/Process/nease_output.py index aae753fd..2d3b2483 100644 --- a/container/domain/Process/nease_output.py +++ b/container/domain/Process/nease_output.py @@ -1,3 +1,4 @@ +import json import pickle import traceback from io import StringIO @@ -8,6 +9,7 @@ from matplotlib import pyplot as plt +from domain.models import NeaseSaveLocationMapping from domain.nease import nease from domain.nease.process import webify_table from django.conf import settings @@ -16,12 +18,14 @@ images_path = os.path.join(settings.MEDIA_ROOT, 'images/') data_path = os.path.join(settings.MEDIA_ROOT, 'nease_tables/') nease_path = 'nease_events/' +# The subdirectories contain files saved for one week, one month, and six months. To be very lenient, we calculated every month with 31 days. 
+days_to_folder = {"0": nease_path+"zero_days/", "7": nease_path+"seven_days/", "31": nease_path+"thirtyone_days/", "186": nease_path+"onehundredeightysix_days/"} +default_path = days_to_folder["7"] -for path in [images_path, data_path, nease_path]: +for path in [images_path, data_path] + list(days_to_folder.values()): if not os.path.exists(path): os.makedirs(path) - # web_tables_options is a dictionary that contains the options for webifying the tables web_tables_options = { 'domains': {'link_col': ['Gene name', 'Gene stable ID', 'Exon stable ID', 'Pfam ID'], @@ -50,7 +54,7 @@ } -def run_nease(data, organism, params): +def run_nease(data, organism, params, file_name='', custom_name=''): run_id = str(uuid.uuid4()) image_path = images_path + run_id @@ -97,7 +101,8 @@ def run_nease(data, organism, params): value.drop(columns=['Unnamed: 0'], inplace=True) # save events to pickle - events.save(nease_path + run_id) + events.save(default_path + run_id) + NeaseSaveLocationMapping(run_id=run_id, saved_for_days=7, file_name=file_name, custom_name=custom_name).save() return events, info_tables, run_id @@ -126,7 +131,13 @@ def read_extra_spaces(file_obj): def get_nease_events(run_id): - events = nease.load(nease_path + run_id + '.pkl') + days = NeaseSaveLocationMapping.get_saved_for_days(run_id) + if days not in days_to_folder: + file_path = default_path + else: + file_path = days_to_folder[str(days)] + print(f"Loading events from {file_path + run_id + '.pkl'}") + events = nease.load(file_path + run_id + '.pkl') info_tables = {} try: domains = webify_table(pd.read_csv(f"{data_path}{run_id}_domains.csv"), web_tables_options['domains']) @@ -243,6 +254,27 @@ def create_plot(terms, pvalues, cut_off, filename): plt.clf() plt.close() +def change_save_timing(run_id, days): + mapping = NeaseSaveLocationMapping.objects.get(run_id=run_id) + current_days_folder = mapping.get_number_of_saved_for_days() + if days not in days_to_folder: + new_file_path = default_path + else: + 
new_file_path = days_to_folder[str(days)] + if current_days_folder not in days_to_folder: + old_file_path = default_path + else: + old_file_path = days_to_folder[str(current_days_folder)] + # move the file + os.rename(old_file_path + run_id + '.pkl', new_file_path + run_id + '.pkl') + # update the database + mapping.saved_for_days = int(days) + mapping.save() + + return json.dumps( + {"logmessage": "Changing the save timing from " + str(current_days_folder) + " to " + str(days) + " was successful.", + "days_left": mapping.days_left()} + ) def match_name_with_format(filename): name_matches = {'deltapsi': 'MAIJQ', diff --git a/container/domain/migrations/0001_initial.py b/container/domain/migrations/0001_initial.py new file mode 100644 index 00000000..b3ff3ff7 --- /dev/null +++ b/container/domain/migrations/0001_initial.py @@ -0,0 +1,38 @@ +# Generated by Django 2.2.28 on 2024-09-09 12:32 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='Domain', + fields=[ + ('pfam_id', models.CharField(db_index=True, max_length=10, primary_key=True, serialize=False)), + ('symbol', models.CharField(max_length=20)), + ('description', models.CharField(max_length=150)), + ], + ), + migrations.CreateModel( + name='Gene', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('ensembl_id', models.CharField(max_length=20)), + ('gene_symbol', models.CharField(db_index=True, max_length=20)), + ], + ), + migrations.CreateModel( + name='NeaseSaveLocationMapping', + fields=[ + ('run_id', models.CharField(db_index=True, max_length=36, primary_key=True, serialize=False)), + ('saved_for_days', models.IntegerField()), + ('date_of_creation', models.DateTimeField(auto_now_add=True)), + ], + ), + ] diff --git a/container/domain/migrations/0002_neasesavelocationmapping_name.py 
b/container/domain/migrations/0002_neasesavelocationmapping_name.py new file mode 100644 index 00000000..d9b67b54 --- /dev/null +++ b/container/domain/migrations/0002_neasesavelocationmapping_name.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.28 on 2024-09-09 13:22 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('domain', '0001_initial'), + ] + + operations = [ + migrations.AddField( + model_name='neasesavelocationmapping', + name='name', + field=models.CharField(default='', max_length=255), + ), + ] diff --git a/container/domain/migrations/0003_auto_20240910_1357.py b/container/domain/migrations/0003_auto_20240910_1357.py new file mode 100644 index 00000000..d3bab534 --- /dev/null +++ b/container/domain/migrations/0003_auto_20240910_1357.py @@ -0,0 +1,27 @@ +# Generated by Django 2.2.28 on 2024-09-10 11:57 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('domain', '0002_neasesavelocationmapping_name'), + ] + + operations = [ + migrations.RemoveField( + model_name='neasesavelocationmapping', + name='name', + ), + migrations.AddField( + model_name='neasesavelocationmapping', + name='custom_name', + field=models.CharField(default='', max_length=255), + ), + migrations.AddField( + model_name='neasesavelocationmapping', + name='file_name', + field=models.CharField(default='', max_length=255), + ), + ] diff --git a/container/domain/migrations/__init__.py b/container/domain/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/container/domain/models.py b/container/domain/models.py index e80be743..794fd663 100644 --- a/container/domain/models.py +++ b/container/domain/models.py @@ -1,4 +1,5 @@ from django.db import models +from django.utils import timezone class Gene(models.Model): @@ -16,3 +17,28 @@ class Domain(models.Model): #def __str__(self): # return f"Pfam: {self.pfam_id} - Symbol: {self.symbol}" + +class 
NeaseSaveLocationMapping(models.Model): + run_id = models.CharField(max_length=36, primary_key=True, db_index=True) + saved_for_days = models.IntegerField() + date_of_creation = models.DateTimeField(auto_now_add=True) + file_name = models.CharField(max_length=255, default='') + custom_name = models.CharField(max_length=255, default='') + + # Method to query the database for the run_id and return the saved_for_days + @staticmethod + def get_saved_for_days(run_id): + return str(NeaseSaveLocationMapping.objects.get(run_id=run_id).saved_for_days) + + def get_number_of_saved_for_days(self): + return str(self.saved_for_days) + + # Calculate how many days are left until deletion, negative values are set to 0 + def days_left(self): + return max(0, self.saved_for_days - (timezone.now() - self.date_of_creation).days) + + # Get the custom name and return None if it is empty + def get_custom_name(self): + if self.custom_name == '': + return None + return self.custom_name diff --git a/container/domain/static/domain/initStore.js b/container/domain/static/domain/initStore.js index 9f4afc85..1147d361 100644 --- a/container/domain/static/domain/initStore.js +++ b/container/domain/static/domain/initStore.js @@ -100,7 +100,7 @@ const newValue = value == null ? 
null - : { value, expiresAt: getExpiration(expiresInDays), name } + : { value, expiresAt: getExpiration(expiresInDays), name , createdAt: Date.now()} setExpiration(newValue?.expiresAt) storage.set(prefixedKey, newValue) }, diff --git a/container/domain/static/domain/loadStore.js b/container/domain/static/domain/loadStore.js index ffc3f6a3..4536d9d6 100644 --- a/container/domain/static/domain/loadStore.js +++ b/container/domain/static/domain/loadStore.js @@ -16,16 +16,16 @@ function removeExpiredData(key) { } // Function to create an HTML template -function createHtmlTemplate(data, expiresInDays = 7) { +function createHtmlTemplate(data) { // subtract 7 days from the expiry date - const createdDate = new Date(data.expiresAt) - (expiresInDays * 24 * 60 * 60 * 1000); + const createdDate = new Date(data.createdAt); // format to readable date const formattedDate = new Date(createdDate).toLocaleString(); return `
-
+
${data.name}

@@ -49,8 +49,7 @@ function appendTemplateToDiv(template, divId) { } } -function prevAnalysis(id, name) { +function prevAnalysis(id) { document.getElementById('previous_analyses_input').value = id; - document.getElementById('previous_analyses_name').value = name; document.getElementById('submit').click(); } diff --git a/container/domain/static/image/Isoform_mode.svg b/container/domain/static/image/Isoform_mode.svg index c4dff247..17ee6be3 100644 --- a/container/domain/static/image/Isoform_mode.svg +++ b/container/domain/static/image/Isoform_mode.svg @@ -2,23 +2,23 @@ + inkscape:export-ydpi="72.18045" + xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" + xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" + xmlns="http://www.w3.org/2000/svg" + xmlns:svg="http://www.w3.org/2000/svg" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:cc="http://creativecommons.org/ns#" + xmlns:dc="http://purl.org/dc/elements/1.1/"> + inkscape:guide-bbox="true" + inkscape:showpageshadow="2" + inkscape:pagecheckerboard="0" + inkscape:deskcolor="#d1d1d1"> + - Isoform-specific interactions - + + Isoform 1 - Isoform 1 + Isoform 2 - - - - - - - + style="font-style:normal;font-weight:normal;font-size:3.10125px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.18803" + xml:space="preserve">Isoform 2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + style="fill:none;stroke:#808080;stroke-width:0.2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:0.8, 0.8;stroke-dashoffset:0;stroke-opacity:1" + d="m 130.24513,223.37061 c 19.79366,0 19.79366,0 19.79366,0" + id="path31610-7" + inkscape:connector-curvature="0" /> + Missing interaction + Interacting domain + style="fill:none;stroke:#808080;stroke-width:0.2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" 
+ d="m 87.071148,223.25988 h 12.133839 v 0" + id="path31783" + inkscape:connector-curvature="0" /> - - - - - - - - - - - - - - - - - - - - - - - - - + inkscape:transform-center-x="4.2110024" + inkscape:transform-center-y="4.4165459" + transform="matrix(-0.00149698,-0.18284705,-0.24250748,0.01757938,133.69873,230.22539)" /> - - Missing interaction - Interacting domain - - + id="flowPara11145" /> + diff --git a/container/domain/static/image/exon_page.svg b/container/domain/static/image/exon_page.svg index 5f37defe..e60fd152 100644 --- a/container/domain/static/image/exon_page.svg +++ b/container/domain/static/image/exon_page.svg @@ -2,23 +2,23 @@ + inkscape:export-ydpi="72.18045" + xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" + xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" + xmlns="http://www.w3.org/2000/svg" + xmlns:svg="http://www.w3.org/2000/svg" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:cc="http://creativecommons.org/ns#" + xmlns:dc="http://purl.org/dc/elements/1.1/"> + inkscape:guide-bbox="true" + inkscape:showpageshadow="2" + inkscape:pagecheckerboard="0" + inkscape:deskcolor="#d1d1d1" /> @@ -49,7 +52,6 @@ image/svg+xml - @@ -69,327 +71,318 @@ height="400.71429" x="214.28572" y="571.80542" /> Interacting residue + + mapped to the exon - Interacting residue mapped to the exon + Interacting domainInteracting domainmapped to the exon - Exon-specific features - - + style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:3.175px;font-family:Arial;-inkscape-font-specification:Arial">mapped to the exon + + + + + + + + + + + + + + + + + + + + + + style="fill:none;stroke:#808080;stroke-width:0.2;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + d="m 85.840326,227.66769 h 8.83591 v 0" + id="path31783" + inkscape:connector-curvature="0" /> - - - - - - - - - - - - - - - - + + inkscape:randomized="0" + 
d="m 124.85844,145.64378 -0.62814,0.31001 -0.10072,0.69319 -0.48894,-0.5016 -0.69039,0.11841 0.32596,-0.62001 -0.32596,-0.62001 0.69039,0.11842 0.48894,-0.5016 0.10072,0.69319 z" + inkscape:transform-center-x="-0.48595062" + inkscape:transform-center-y="-0.049638618" + transform="matrix(2.5493272,0,0,2.5430654,-176.45893,-143.9927)" /> - - - diff --git a/container/domain/static/image/nease_page.png b/container/domain/static/image/nease_page.png index 792e5789..7de7df7b 100644 Binary files a/container/domain/static/image/nease_page.png and b/container/domain/static/image/nease_page.png differ diff --git a/container/domain/static/image/net_page.svg b/container/domain/static/image/net_page.svg index 52d09dc0..fc834c44 100644 --- a/container/domain/static/image/net_page.svg +++ b/container/domain/static/image/net_page.svg @@ -2,23 +2,23 @@ + inkscape:export-ydpi="72.18045" + xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" + xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" + xmlns="http://www.w3.org/2000/svg" + xmlns:svg="http://www.w3.org/2000/svg" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:cc="http://creativecommons.org/ns#" + xmlns:dc="http://purl.org/dc/elements/1.1/"> + inkscape:guide-bbox="true" + inkscape:showpageshadow="2" + inkscape:pagecheckerboard="0" + inkscape:deskcolor="#d1d1d1"> + - Subnetwork of multiple isoforms - - + + - + - + Multiple proteinMultiple protein variants - - - - - - - - - - - - - - - - - + style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:3.52778px;font-family:Arial;-inkscape-font-specification:Arial;stroke-width:0.264583"> variants + + + + + + + + + + + + + + + + + + diff --git a/container/domain/templates/setup/nease_setup.html b/container/domain/templates/setup/nease_setup.html index 64928a3b..5207f765 100644 --- a/container/domain/templates/setup/nease_setup.html +++ b/container/domain/templates/setup/nease_setup.html @@ -219,7 +219,6 @@
Advanced Options
diff --git a/container/domain/templates/visualization/nease_result.html b/container/domain/templates/visualization/nease_result.html index 1ac7ce17..094ca033 100644 --- a/container/domain/templates/visualization/nease_result.html +++ b/container/domain/templates/visualization/nease_result.html @@ -91,6 +91,7 @@ } } + @@ -140,6 +141,11 @@

NEASE summary


+ {% if custom_name %} +
Analysis name: {{ custom_name|safe }} +
+ {% endif %}
Input file: {{ input_name|safe }}
@@ -157,8 +163,109 @@
Affected protein interactions/bindings: {{ interaction_affected|safe }} -
+
+
+ + +
+ +
Link for sharing: + + +
+
@@ -438,9 +545,12 @@
const nease_databases = [ {% for db in nease_dbs %} "{{ db|safe }}", {% endfor %} ] - // save the runId in local storage so that it can be accessed for another 7 days - const userStore = window.initStore({key: runId, expiresInDays: 7, name: fileName}) - userStore.set(runId) + //check if runId is in local storage + if (localStorage.getItem("nease/"+runId) === null){ + // if it is not in local storage, then set it + const userStore = window.initStore({key: runId, expiresInDays: {{ time_left|safe }}, name: fileName}) + userStore.set(runId) + } // enable tooltips $(function () { diff --git a/container/domain/tmp.py b/container/domain/tmp.py deleted file mode 100644 index 091fc07e..00000000 --- a/container/domain/tmp.py +++ /dev/null @@ -1,202 +0,0 @@ -import os.path -import pickle -import re -import networkx as nx -import mygene -import pandas as pd -from sqlalchemy import text -from sqlalchemy import create_engine - - -def load_obj(name): - with open('data/' + name + '.pkl', 'rb') as f: - return pickle.load(f) - - -def vis_node_(node, DomainG): - DomainG = DomainG - G = nx.Graph() - if DomainG.has_node(node): - # copy over the edges with their attributes - for edge in DomainG.edges(node, data=True): - G.add_edge(edge[0], edge[1], **edge[2]) - - # get the amount of edges that are predicted and how many are original - predicted = 0 - original = 0 - for edge in G.edges(data=True): - if edge[2]['origin'] == 'predicted': - predicted += 1 - else: - original += 1 - print(f"Node {node} has {predicted} predicted edges and {original} original edges") - - g = nx.Graph() - g.add_node(node) - for n in G.nodes(): - domain = n.split("/")[1] - if n != node: - # add predicted attribute if it is predicted in G - if (G.has_edge(n, node) or G.has_edge(node, n)) \ - and (G[node][n]['origin'] == 'predicted' or G[n][node]['origin'] == 'predicted'): - if not g.has_edge(n, domain): - g.add_edge(n, domain, origin='predicted') - if not g.has_edge(node, domain): - g.add_edge(node, domain, 
origin='predicted') - else: - g.add_edge(n, domain, origin='original') - g.add_edge(node, domain, origin='original') - - # get the amount of edges that are predicted and how many are original - predicted = 0 - original = 0 - for edge in g.edges(data=True): - if edge[2]['origin'] == 'predicted': - predicted += 1 - else: - original += 1 - - print(f"Node {node} has {predicted} predicted edges and {original} original edges") - - -def graph_test(DomainG, node): - G = nx.Graph() - edge_attr = set() - for edge in DomainG.edges(data=True): - edge_attr.add(edge[2]['confidence']) - print(edge_attr) - if DomainG.has_node(node): - G.add_edges_from(DomainG.edges(node, data=True)) - - # copy over the edges with their attributes - # for edge in DomainG.edges(node, data=True): - # G.add_edge(edge[0], edge[1], **edge[2]) - # check if edge data is there - print(G.edges(data=True)) - return G - - g = nx.Graph() - g.add_node(node) - for n in G.nodes(): - domain = n.split("/")[1] - if n == node: - continue - # add predicted attribute if it is predicted in G - if G.has_edge(n, node) and (G[node][n]['confidence'] != 'original' or G[n][node]['confidence'] != 'original'): - print(G[node][n]['confidence'], G[n][node]['confidence']) - if not g.has_edge(n, domain): - g.add_edge(n, domain, confidence=G[n][node]['confidence']) - if not g.has_edge(node, domain): - g.add_edge(node, domain, confidence=G[n][node]['confidence']) - - else: - g.add_edge(n, domain, confidence='original') - g.add_edge(node, domain, confidence='original') - - -# returns true if a graph1 is a subgraph of graph2 -def is_subgraph(graph1, graph2): - for i in graph1.nodes(): - if i not in graph2.nodes(): - return False - for i in graph1.edges(): - if i not in graph2.edges(): - return False - return True - - -# get node and edge overlap between two graphs -def graph_overlap(graph1, graph2): - node_overlap = 0 - for i in graph1.nodes(): - if i in graph2.nodes(): - node_overlap += 1 - edge_overlap = 0 - for i in graph1.edges(): 
- if i in graph2.edges(): - edge_overlap += 1 - return node_overlap, edge_overlap - - -def remove_nan_nodes(graph): - for node in list(graph.nodes): # Create a copy of the nodes - if "nan" in node: - graph.remove_node(node) - return graph - - -def extract_confidence(domainG): - confidences = set() - for edge in domainG.edges(data=True): - confidences.add(edge[2].get('confidence', 'original')) - print(confidences) - - -def Ensemb_to_entrez(genes, organsim='human'): - mg = mygene.MyGeneInfo() - out = mg.querymany(genes, scopes="ensembl.gene", fields='entrezgene', species=organsim, verbose=False) - translated = {} - for result in out: - if 'entrezgene' in result: - translated[result['query']] = result['entrezgene'] - return translated - - -def co_partner_egdes(Ensemble_transID, organism): - # create connection to database - engine = create_engine('postgresql://postgres:postgres@172.19.0.3:5432/postgres') - - N = [] - exons_in_interface = [] - # p1=PPI[ PPI['Transcript stable ID_x']==Ensemble_transID] - # p2=PPI[ PPI['Transcript stable ID_y']==Ensemble_transID] - - partners = [] - - query = """ - SELECT * - FROM ppi_data_""" + organism + """ - WHERE "Transcript stable ID_x"=:ensemble_trans_id - """ - tr_1 = pd.read_sql_query(sql=text(query), con=engine, params={'ensemble_trans_id': Ensemble_transID}) - - partners = tr_1['Transcript stable ID_y'].unique().tolist() - - query = """ - SELECT * - FROM ppi_data_""" + organism + """ - WHERE "Transcript stable ID_y"=:ensemble_trans_id - """ - tr_2 = pd.read_sql_query(sql=text(query), con=engine, params={'ensemble_trans_id': Ensemble_transID}) - - co_partners = [] - if len(partners) != 0: - partners = list(set(partners + tr_2['Transcript stable ID_x'].unique().tolist())) - print(partners) - - -if __name__ == '__main__': - pass - - co_partner_egdes('ENST00000286548', 'human') - - # ppi_graph: nx.Graph = pickle.load(open('../domain/data/Homo sapiens[human]/DomainG.pkl', 'rb')) - # print(len(ppi_graph.nodes)) - # ppi_graph = 
remove_nan_nodes(ppi_graph) - # print(len(ppi_graph.nodes)) - # pickle.dump(ppi_graph, open('../domain/data/Homo sapiens[human]/DomainG_up.pkl', 'wb')) - - # edges_domainV = {'test': [1,2,3,4], 'original': [1,2,3,4]} - # print(len(edges_domainV.get('original')) > 70 if edges_domainV.get('original') else True) - # # load human DDI graph - # DomainG_human = pickle.load(open('data/Homo sapiens[human]/DomainG.pkl', 'rb')) - # extract_confidence(DomainG_human) - # DomainG_mouse = pickle.load(open('data/Mus musculus[mouse]/DomainG.pkl', 'rb')) - # print(len(DomainG_mouse.edges), len(DomainG_mouse.nodes)) - # - # # check if graphs aver overlapping - # print(is_subgraph(DomainG_mouse, DomainG_human)) - # node, edge = graph_overlap(DomainG_mouse, DomainG_human) - # print(f"Edges in both: {edge}, Nodes in both: {node}") - # #domaing = pickle.load(open('data/Homo sapiens[human]/DomainG.pkl', 'rb')) - # #graph_test(domaing, '10114/PF00069') diff --git a/container/domain/views.py b/container/domain/views.py index 851ce040..c93e40f7 100644 --- a/container/domain/views.py +++ b/container/domain/views.py @@ -12,6 +12,7 @@ from django.conf import settings from django.http import HttpResponse, JsonResponse from django.shortcuts import render, redirect +from django.urls import reverse from django.utils.html import escape from io import StringIO @@ -26,6 +27,7 @@ from .Process import mutliple_query as mq from .Process import process_data as proc_data from .Process import nease_output as no +from .models import NeaseSaveLocationMapping # --- Create folder # Global jobs path @@ -682,10 +684,14 @@ def Multi_proteins(request, organism, job='0'): return render(request, 'visualization/network.html', context) -def set_previous_analysis(request): - print("got previous analysis with run ID:", request.POST.get('previousAnalysis')) +def set_previous_analysis(request, post_request=True): + if post_request: + run_id = request.POST.get('previousAnalysis') + else: + run_id = request.GET.get('runId') 
+ print("got previous analysis with run ID:", run_id) try: - events, info_tables = no.get_nease_events(request.POST.get('previousAnalysis')) + events, info_tables = no.get_nease_events(run_id) except FileNotFoundError: context = {'error_msg': "Could not find this analysis, please run it again."} @@ -695,17 +701,23 @@ def set_previous_analysis(request): context = {'error_msg': str(e)} return render(request, 'setup/nease_setup.html', context) - run_id = request.POST.get('previousAnalysis') - for key, value in info_tables.items(): info_tables[key] = value.to_html(table_id=f"{key}_table", **settings.TO_HTML_RESPONSIVE_PARAMETERS) + save_info = NeaseSaveLocationMapping.objects.get(run_id=run_id) + current_duration = save_info.saved_for_days + time_left = save_info.days_left() + context = { - 'input_name': request.POST.get('previousName'), + 'input_name': save_info.file_name, + 'custom_name': save_info.get_custom_name(), **events.summary, **info_tables, 'stats': run_id + ".jpg", + 'shareable_link': request.build_absolute_uri(reverse('nease-analysis')) + "?runId=" + run_id, 'run_id': run_id, + 'current_duration': current_duration, + 'time_left': time_left, **events.get_databases() } try: @@ -715,12 +727,15 @@ def set_previous_analysis(request): traceback.print_exc() return render(request, 'setup/nease_setup.html', context) - # this does the initial nease run or loads a previous analysis def setup_nease(request): # handle previous analysis if request.POST.get('previousAnalysis', None): - return set_previous_analysis(request) + return set_previous_analysis(request, True) + + # handle previous analysis as a get request + if request.GET.get('runId', None): + return set_previous_analysis(request, False) # otherwise continue with new analysis if not request.FILES: @@ -780,18 +795,27 @@ def setup_nease(request): 'min_delta': min_delta, 'majiq_confidence': majiq_confidence, 'only_ddis': only_ddis, - 'confidences': confidences}) + 'confidences': confidences}, + 
input_data['splicing-events-file'].name, + custom_name) for key, value in info_tables.items(): info_tables[key] = value.to_html(table_id=f"{key}_table", **settings.TO_HTML_RESPONSIVE_PARAMETERS) + save_info = NeaseSaveLocationMapping.objects.get(run_id=run_id) + current_duration = save_info.saved_for_days + time_left = save_info.days_left() + context = { 'input_name': input_data['splicing-events-file'].name, 'custom_name': custom_name, **events.summary, **info_tables, 'stats': run_id + ".jpg", + 'shareable_link': request.build_absolute_uri(reverse('nease-analysis')) + "?runId=" + run_id, 'run_id': run_id, + 'current_duration': current_duration, + 'time_left': time_left, **events.get_databases(), } return render(request, 'visualization/nease_result.html', context) @@ -810,18 +834,17 @@ def setup_nease(request): context['error_msg'] = error_out return render(request, 'setup/nease_setup.html', context) - # extra functions for the NEASE output once the analysis is done def nease_extra_functions(request): function_name = request.GET.get('func', None) if not function_name: return HttpResponse("No function provided", status=400) run_id = request.GET.get('runId', None) + if not run_id: + return HttpResponse("No run ID provided", status=400) databases = request.GET.get('databases', None) pathway = request.GET.get('pathway', None) k = request.GET.get('k', None) - if not run_id: - return HttpResponse("No run ID provided", status=400) if databases: databases = databases.split(",") @@ -840,6 +863,11 @@ def nease_extra_functions(request): table_name = "path" elif function_name == 'visualise': out_table = no.visualise_path(no.get_nease_events(run_id), pathway, k) + elif function_name == 'save': + duration = request.GET.get('duration', None) + if not duration: + return HttpResponse("No duration provided", status=400) + out_table = no.change_save_timing(run_id, duration) else: return HttpResponse(f"Unknown function: {function_name}", status=400) except Exception as e: diff --git 
a/docker/web/auto_clean.sh b/docker/web/auto_clean.sh index f7e797ca..e2c40ab2 100644 --- a/docker/web/auto_clean.sh +++ b/docker/web/auto_clean.sh @@ -1,16 +1,28 @@ #!/bin/bash -# This script is used to clean the files in the /code/nease_events, /code/run/media directories + any subdirectory -# after 7 days since the last access time. +# This script is used to clean the files in the subdirectories of /code/nease_events, /code/run/media directories + any subdirectory # The script is scheduled to run every day at 00:00 using a cron job -# check if theres an environment variable for the time to delete files -DELETE_AFTER=${DELETE_AFTER:-7} +# define the different folders that should be cleaned +FOLDER_ONE="zero_days" +FOLDER_TWO="seven_days" +FOLDER_THREE="thirtyone_days" +FOLDER_FOUR="onehundredeightysix_days" +FOLDERS=($FOLDER_ONE $FOLDER_TWO $FOLDER_THREE $FOLDER_FOUR) +# define the number of days after which the files should be deleted +ONE_DELETE_AFTER=0 +TWO_DELETE_AFTER=7 +THREE_DELETE_AFTER=31 +FOUR_DELETE_AFTER=186 +DELETION_TIMES=($ONE_DELETE_AFTER $TWO_DELETE_AFTER $THREE_DELETE_AFTER $FOUR_DELETE_AFTER) -# Find and delete files in /code/nease_events directory -echo "Found $(find /code/nease_events -type f -atime +$DELETE_AFTER | wc -l) files to delete" -find /code/nease_events -type f -atime +$DELETE_AFTER -delete +# Loop over the folders and deletion times +for i in "${!FOLDERS[@]}"; do + # Find and delete files in the corresponding /code/nease_events directory + echo "Found $(find /code/nease_events/${FOLDERS[$i]} -type f -mtime +${DELETION_TIMES[$i]} | wc -l) files to delete, as they are older than ${DELETION_TIMES[$i]} days." 
+ find /code/nease_events/${FOLDERS[$i]} -type f -mtime +${DELETION_TIMES[$i]} -delete +done # Find and delete files in /code/run/media directory -echo "Found $(find /code/run/media -type f -atime +$DELETE_AFTER | wc -l) files to delete" -find /code/run/media -type f -atime +$DELETE_AFTER -delete +echo "Found $(find /code/run/media -type f -mtime +$FOUR_DELETE_AFTER | wc -l) files to delete" +find /code/run/media -type f -mtime +$FOUR_DELETE_AFTER -delete