Local simulations #23

Open · wants to merge 33 commits into base: master

Changes from 9 commits

Commits (33)
812c372
added hexamer extended+multi walker simulation template
Apr 3, 2023
723f470
added hexamer helix simulation template files and updated some .gitig…
Apr 3, 2023
b6685bd
added a global gitignore to prevent commits of signac workspace and v…
Apr 3, 2023
e1cd25e
added octamer helix, extended and extended bespokefit simulations, th…
Apr 3, 2023
7079d5e
updated extended simulations to try different sigma values
Apr 3, 2023
5664f2d
updated init and analysis scripts for extended mw bespoke metad simul…
Apr 3, 2023
cf1f530
moved some replica exchange trajectory files to PL library to make ro…
Apr 5, 2023
b2800f5
added function for adding files to metadynamics simulations directories
Apr 5, 2023
7ee5bf4
added all the files to assign parameters for the helical peptide, min…
tlfobe Apr 10, 2023
8a89f66
added necesary files for REMD simulation, also updated scripts to inc…
tlfobe Apr 11, 2023
0eac850
fixed submit scripts, old scripts used a restrained topology file
Apr 11, 2023
e24acdf
merged two branches so that simulations can run from both parts
Apr 11, 2023
d760d68
added git ignore to remove sim directories from REMD
Apr 11, 2023
dec12c7
added ions to solvation box
tlfobe Apr 20, 2023
842ee30
Merge branch 'helical_peptide' of https://github.com/shirtsgroup/hete…
tlfobe Apr 20, 2023
85c63df
merged home and work versions of this PR
tlfobe Apr 20, 2023
a8467dc
merged with bridges2 remote version of branch, now includes ions and …
tlfobe Apr 20, 2023
082652f
updated peptide system to shorter version of helical peptide
tlfobe Apr 20, 2023
0a0434e
adding hmr files for shorter peptide sim
tlfobe Apr 20, 2023
726fcd8
most recent updates including shorter peptide
Apr 20, 2023
a6b4c9a
final merge of ions topology with short peptide
Apr 20, 2023
bd1a5f9
updated clustering scripts, there was a bug where I was clustering on…
Apr 21, 2023
be51fb4
added visualization scripts for remd helical peptide remd simulations…
Apr 21, 2023
875b59a
updated topology for control peptide system
tlfobe May 9, 2023
86b74bf
Merge branch 'helical_peptide' of https://github.com/shirtsgroup/hete…
tlfobe May 9, 2023
4a3254d
added metad simulation for helical peptide, created entry point for m…
May 18, 2023
3155a70
Merge branch 'helical_peptide' of https://github.com/shirtsgroup/hete…
May 18, 2023
cd979f8
changed import names and file system to terphenyl_simulations
May 18, 2023
cbc74b0
forgot to remove heteropolymer_simulations directory
May 18, 2023
f810e7b
added files for mop octamer metadynamics
May 18, 2023
4345319
Merge branch 'helical_peptide' of https://github.com/shirtsgroup/hete…
May 18, 2023
cfa1fd1
added single metadynamics walker with bash scripts to run on a local …
tlfobe May 24, 2023
52ada3f
added a local version of metad_analysis, would be good to have an opt…
tlfobe May 26, 2023
57 changes: 55 additions & 2 deletions heteropolymer_simulations/scripts.py
@@ -8,7 +8,6 @@
from openff.toolkit.topology import FrozenMolecule, Molecule, Topology
from openff.toolkit.typing.engines.smirnoff import ForceField
from openff.interchange.components.interchange import Interchange
import pdb
import os
import argparse
import panedr
@@ -20,6 +19,9 @@
import shutil
import numpy as np
import sys
import shutil
import signac
from utils import replace_all_pattern


def renumber_pdb_atoms():
@@ -410,4 +412,55 @@ def REMD_setup():
else:
w.write(line)
for extra in args.extra_files:
shutil.copy(extra, os.path.join(sim_path, extra))
shutil.copy(extra, os.path.join(sim_path, extra))
def METAD_analysis():
"""
Script for running the metadynamics analysis workflow, generalized to all
versions of the terphenyl oligomers.
"""

pass

def METAD_add_files():

def parse_args():
parser = argparse.ArgumentParser(
description = "A script to add files to signac projects",
)

parser.add_argument(
"-f", "--file_path",
type = str,
nargs = "+",
help = "files to add to signac projects"
)

parser.add_argument(
"-r", "--replace",
type = str,
nargs = "+",
help = "List of string needed to repalce in added files. \
Currently this only works for WALKER_DIRS and strings \
matching the statepoint variables."
)

return parser.parse_args()

args = parse_args()
project = signac.get_project()

for job in project.find_jobs():
for file_path, replace in zip(args.file_path, args.replace):
filename = file_path.split("/")[-1]
n_walkers = len(glob.glob(job.fn("WALKER*")))
if replace == "WALKER_DIRS":
walker_dirs = " ".join(["WALKER" + str(walker_id) for walker_id in range(n_walkers)])
shutil.copy(file_path, job.fn(filename))
replace_all_pattern("WALKER_DIRS", walker_dirs, job.fn(filename))
if replace in job.sp.keys():
walker_dirs = ["WALKER" + str(walker_id) for walker_id in range(n_walkers)]
for walker_dir in walker_dirs:
shutil.copy(file_path, job.fn(os.path.join(walker_dir, filename)))
replace_all_pattern(replace, str(job.sp[replace]), job.fn(os.path.join(walker_dir, filename)))
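
For reference, the copy-and-substitute step that METAD_add_files performs per job can be exercised outside of signac. The sketch below uses only the standard library; the walker count, template file name, and template contents are made up for illustration, and the inline replace_all_pattern mirrors the helper added to utils.py in this PR.

import os
import re
import tempfile

def replace_all_pattern(pattern, replace, file):
    # Same behaviour as the helper in utils.py: regex-substitute every line in place.
    with open(file, "r") as f:
        lines = f.readlines()
    with open(file, "w") as f:
        f.writelines(re.sub(pattern, replace, line) for line in lines)

workdir = tempfile.mkdtemp()
n_walkers = 4

# Stand-in for a signac job workspace containing WALKER0..WALKER3 directories
for walker_id in range(n_walkers):
    os.makedirs(os.path.join(workdir, "WALKER" + str(walker_id)))

# Hypothetical template submit script carrying the WALKER_DIRS placeholder
template = os.path.join(workdir, "submit_all.sh")
with open(template, "w") as f:
    f.write("for d in WALKER_DIRS; do (cd $d && gmx mdrun -plumed plumed.dat); done\n")

# Expand the placeholder into a space-separated walker list; the real
# METAD_add_files copies the file into job.fn(filename) first and then
# runs replace_all_pattern on that copy.
walker_dirs = " ".join("WALKER" + str(i) for i in range(n_walkers))
replace_all_pattern("WALKER_DIRS", walker_dirs, template)

with open(template) as f:
    print(f.read())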


22 changes: 21 additions & 1 deletion heteropolymer_simulations/utils.py
@@ -4,7 +4,8 @@
from datetime import datetime
from rdkit import Chem
import platform
import numpy as np
import numpy as np
import re
import sys

class TopFileObject:
@@ -228,6 +229,25 @@ def get_angle_ids(universe, resname, angle_id, template_residue_i = 0):
return(angle_ids)


def replace_all_pattern(pattern, replace, file):
"""
Function to replace every occurrence of a pattern in a file with a particular string

Parameters
----------
pattern : string
string pattern to replace in file
replace : string
string that will replace the pattern found in file
file : string
path to file
"""
with open(file, "r") as f:
lines = f.readlines()
for i in range(len(lines)):
lines[i] = re.sub(pattern, replace, lines[i])
with open(file, "w") as f:
f.writelines(lines)

def main():
top = TopFileObject("test.top")
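
A minimal usage sketch for the new replace_all_pattern helper, assuming the package imports as heteropolymer_simulations at this point in the PR (later commits rename it to terphenyl_simulations) and its dependencies are installed; the placeholder name and file contents below are hypothetical.

import tempfile
from heteropolymer_simulations.utils import replace_all_pattern

# Write a small input file containing a SIGMA placeholder
with tempfile.NamedTemporaryFile("w", suffix=".dat", delete=False) as f:
    f.write("METAD ARG=n_hbonds SIGMA=SIGMA HEIGHT=2.5 BIASFACTOR=50\n")
    path = f.name

# Substitute the placeholder in place
replace_all_pattern("SIGMA=SIGMA", "SIGMA=0.5", path)

with open(path) as f:
    print(f.read())  # METAD ARG=n_hbonds SIGMA=0.5 HEIGHT=2.5 BIASFACTOR=50

Note that the pattern is passed to re.sub and treated as a regular expression, so literal regex metacharacters in a placeholder would need escaping.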
3 changes: 3 additions & 0 deletions simulations/terphenyl_mop/hexamer_metad/.gitignore
@@ -0,0 +1,3 @@
*workspace*
*view*

Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
workspace/*
Original file line number Diff line number Diff line change
@@ -0,0 +1,250 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "5080406b",
"metadata": {},
"source": [
"# Plumed Analysis Notebook\n",
"\n",
"Using this notebook to visualize and analyze the output from the H-bond metadynamics simulations"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "0092406d",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/home/tfobe/anaconda3/envs/plumed/lib/python3.11/site-packages/tqdm/auto.py:22: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
" from .autonotebook import tqdm as notebook_tqdm\n"
]
}
],
"source": [
"import plumed\n",
"import signac\n",
"import signac_project\n",
"import matplotlib.pyplot as plt\n",
"import subprocess\n",
"import pandas as pd\n",
"import os\n",
"\n",
"plt.rcParams.update({'font.size':9})\n",
"kt = 300 * 8.314462618 * 10 ** -3"
]
},
{
"cell_type": "markdown",
"id": "7b594dc1",
"metadata": {},
"source": [
"## Get Signac setup"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "4d7665ef",
"metadata": {},
"outputs": [],
"source": [
"project = signac.get_project()\n",
"df = project.to_dataframe()"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "149bda64-632d-4202-8e0f-7056d1d0d6cc",
"metadata": {
"scrolled": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/tmp/ipykernel_495294/2111980519.py:3: DeprecatedWarning: get_id is deprecated as of 1.3 and will be removed in 2.0. Use job.id instead.\n",
" id = job.get_id()\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>sp.height</th>\n",
" <th>sp.sigma</th>\n",
" <th>sp.bf</th>\n",
" <th>sp.replica</th>\n",
" <th>Completed</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>e005c8949f54d2f488a34a8c58afa9e7</th>\n",
" <td>2.5</td>\n",
" <td>0.5</td>\n",
" <td>50</td>\n",
" <td>1</td>\n",
" <td>True</td>\n",
" </tr>\n",
" <tr>\n",
" <th>92418f2455dbccd2c8e0a5fa837c912f</th>\n",
" <td>2.5</td>\n",
" <td>0.5</td>\n",
" <td>50</td>\n",
" <td>0</td>\n",
" <td>True</td>\n",
" </tr>\n",
" <tr>\n",
" <th>124094afb3eb5b9b79dc24d56c8dd0f4</th>\n",
" <td>2.5</td>\n",
" <td>0.5</td>\n",
" <td>50</td>\n",
" <td>4</td>\n",
" <td>True</td>\n",
" </tr>\n",
" <tr>\n",
" <th>31d8189088592b14526db82ab3ebf87c</th>\n",
" <td>2.5</td>\n",
" <td>0.5</td>\n",
" <td>50</td>\n",
" <td>2</td>\n",
" <td>True</td>\n",
" </tr>\n",
" <tr>\n",
" <th>5816dfcc3edff2917296a00af2380da5</th>\n",
" <td>2.5</td>\n",
" <td>0.5</td>\n",
" <td>50</td>\n",
" <td>3</td>\n",
" <td>True</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" sp.height sp.sigma sp.bf sp.replica \\\n",
"e005c8949f54d2f488a34a8c58afa9e7 2.5 0.5 50 1 \n",
"92418f2455dbccd2c8e0a5fa837c912f 2.5 0.5 50 0 \n",
"124094afb3eb5b9b79dc24d56c8dd0f4 2.5 0.5 50 4 \n",
"31d8189088592b14526db82ab3ebf87c 2.5 0.5 50 2 \n",
"5816dfcc3edff2917296a00af2380da5 2.5 0.5 50 3 \n",
"\n",
" Completed \n",
"e005c8949f54d2f488a34a8c58afa9e7 True \n",
"92418f2455dbccd2c8e0a5fa837c912f True \n",
"124094afb3eb5b9b79dc24d56c8dd0f4 True \n",
"31d8189088592b14526db82ab3ebf87c True \n",
"5816dfcc3edff2917296a00af2380da5 True "
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"status = []\n",
"for job in project.find_jobs():\n",
" id = job.get_id()\n",
" status.append(signac_project.check_production_npt_finish(job))\n",
"df[\"Completed\"] = status\n",
"pd.set_option('display.max_rows', None)\n",
"df.sort_values([\"Completed\", \"sp.bf\"], ascending=[False, True])"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "1cc8b438",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Using environment configuration: StandardEnvironment\n",
"Fetching status: 100%|██████████████████████████| 45/45 [00:05<00:00, 8.15it/s]\n",
"Fetching labels: 100%|████████████████████████████| 5/5 [00:00<00:00, 23.05it/s]\n",
"\n",
"Overview: 5 jobs/aggregates, 5 jobs/aggregates with eligible operations.\n",
"\n",
"label ratio\n",
"--------------------------- --------------------------------------------------------\n",
"check_berendsen_npt_finish |████████████████████████████████████████| 5/5 (100.00%)\n",
"check_berendsen_npt_start |████████████████████████████████████████| 5/5 (100.00%)\n",
"check_berendsen_nvt_finish |████████████████████████████████████████| 5/5 (100.00%)\n",
"check_berendsen_nvt_start |████████████████████████████████████████| 5/5 (100.00%)\n",
"check_production_npt_finish |████████████████████████████████████████| 5/5 (100.00%)\n",
"check_production_npt_start |████████████████████████████████████████| 5/5 (100.00%)\n",
"check_submitted |████████████████████████████████████████| 5/5 (100.00%)\n",
"has_backup_files |████████████████████████████████████████| 5/5 (100.00%)\n",
"\n",
"operation/group number of eligible jobs submission status\n",
"---------------------------------- ------------------------- -------------------\n",
"continue_production_npt_simulation 5 [U]: 5\n",
"remove_backup_files 5 [U]: 5\n",
"plot_CV_bias 5 [U]: 5\n",
"calculate_sum_hills_FE 5 [U]: 5\n",
"\n",
"[U]:unknown [R]:registered [I]:inactive [S]:submitted [H]:held [Q]:queued [A]:active [E]:error [GR]:group_registered [GI]:group_inactive [GS]:group_submitted [GH]:group_held [GQ]:group_queued [GA]:group_active [GE]:group_error\n",
"\n",
"\n",
"WARNING: The status compilation took more than 0.2s per job. Consider using `--profile` to determine bottlenecks within the project workflow definition.\n",
"Execute `signac config set flow.status_performance_warn_threshold VALUE` to specify the warning threshold in seconds.\n",
"To speed up the compilation, try executing `signac config set flow.status_parallelization 'process'` to set the status_parallelization config value to process.Use -1 to completely suppress this warning.\n",
"\n"
]
}
],
"source": [
"!python signac_project.py status"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.0"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
project = mop_hexamer_mw_metad_extended
workspace_dir = workspace
schema_version = 1
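
This signac.rc pins the project name and workspace directory; below is a short sketch of how the analysis scripts attach to such a project. Running it from the directory containing signac.rc is assumed, and the statepoint keys (height, sigma, bf, replica) are assumed to match those shown in the notebook's job table.

import signac

# get_project() resolves the mop_hexamer_mw_metad_extended project
# and its workspace/ directory from the current working directory.
project = signac.get_project()
for job in project.find_jobs({"sigma": 0.5}):
    print(job.id, job.sp.height, job.sp.bf, job.sp.replica)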