diff --git a/.github/workflows/virtual-environments.yml b/.github/workflows/virtual-environments.yml
index 356736eb7caf..03f16bd2f014 100644
--- a/.github/workflows/virtual-environments.yml
+++ b/.github/workflows/virtual-environments.yml
@@ -48,11 +48,10 @@ jobs:
dotnet test ./Tests/bin/Release/QuantConnect.Tests.dll --filter "FullyQualifiedName=QuantConnect.Tests.Python.PythonPackagesTests.TensorflowProbabilityTest" --blame-hang-timeout 120seconds --blame-crash
# Run Hvplot Python Package Test
dotnet test ./Tests/bin/Release/QuantConnect.Tests.dll --filter "FullyQualifiedName=QuantConnect.Tests.Python.PythonPackagesTests.HvplotTest" --blame-hang-timeout 120seconds --blame-crash
- # Run Stellargraph Python Package Test
- dotnet test ./Tests/bin/Release/QuantConnect.Tests.dll --filter "FullyQualifiedName=QuantConnect.Tests.Python.PythonPackagesTests.StellargraphTest" --blame-hang-timeout 120seconds --blame-crash
# Run Keras Python Package Test
dotnet test ./Tests/bin/Release/QuantConnect.Tests.dll --filter "FullyQualifiedName=QuantConnect.Tests.Python.PythonPackagesTests.KerasTest" --blame-hang-timeout 120seconds --blame-crash
- # Run Scikeras Python Package Test
- dotnet test ./Tests/bin/Release/QuantConnect.Tests.dll --filter "FullyQualifiedName=QuantConnect.Tests.Python.PythonPackagesTests.ScikerasTest" --blame-hang-timeout 120seconds --blame-crash
# Run Transformers
- dotnet test ./Tests/bin/Release/QuantConnect.Tests.dll --filter "FullyQualifiedName=QuantConnect.Tests.Python.PythonPackagesTests.Transformers|XTransformers" --blame-hang-timeout 120seconds --blame-crash
\ No newline at end of file
+ dotnet test ./Tests/bin/Release/QuantConnect.Tests.dll --filter "FullyQualifiedName=QuantConnect.Tests.Python.PythonPackagesTests.Transformers" --blame-hang-timeout 120seconds --blame-crash
+ dotnet test ./Tests/bin/Release/QuantConnect.Tests.dll --filter "FullyQualifiedName=QuantConnect.Tests.Python.PythonPackagesTests.XTransformers" --blame-hang-timeout 120seconds --blame-crash
+ # Run Shap
+ dotnet test ./Tests/bin/Release/QuantConnect.Tests.dll --filter "FullyQualifiedName=QuantConnect.Tests.Python.PythonPackagesTests.ShapTest" --blame-hang-timeout 120seconds --blame-crash
\ No newline at end of file
diff --git a/Algorithm.CSharp/QuantConnect.Algorithm.CSharp.csproj b/Algorithm.CSharp/QuantConnect.Algorithm.CSharp.csproj
index 96dfa80733c1..28df0f8d7d81 100644
--- a/Algorithm.CSharp/QuantConnect.Algorithm.CSharp.csproj
+++ b/Algorithm.CSharp/QuantConnect.Algorithm.CSharp.csproj
@@ -34,7 +34,7 @@
portable
-
+
diff --git a/Algorithm.Framework/QuantConnect.Algorithm.Framework.csproj b/Algorithm.Framework/QuantConnect.Algorithm.Framework.csproj
index 724dc6a0a8da..b3ce44c5f62a 100644
--- a/Algorithm.Framework/QuantConnect.Algorithm.Framework.csproj
+++ b/Algorithm.Framework/QuantConnect.Algorithm.Framework.csproj
@@ -29,7 +29,7 @@
LICENSE
-
+
diff --git a/Algorithm.Python/ObjectStoreExampleAlgorithm.py b/Algorithm.Python/ObjectStoreExampleAlgorithm.py
index 66596110f9cf..9b0d727a5265 100644
--- a/Algorithm.Python/ObjectStoreExampleAlgorithm.py
+++ b/Algorithm.Python/ObjectStoreExampleAlgorithm.py
@@ -49,7 +49,7 @@ def Initialize(self):
history = pd.read_csv(StringIO(values), header=None, index_col=0, squeeze=True)
history.index = pd.to_datetime(history.index)
- for time, close in history.iteritems():
+ for time, close in history.items():
self.SPY_Close.Update(time, close)
else:
@@ -59,7 +59,7 @@ def Initialize(self):
# we're pulling the last year's worth of SPY daily trade bars to feed into our indicators
history = self.History(self.SPY, timedelta(365), Resolution.Daily).close.unstack(0).squeeze()
- for time, close in history.iteritems():
+ for time, close in history.items():
self.SPY_Close.Update(time, close)
# save our warm up data so next time we don't need to issue the history request
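The two hunks above swap `Series.iteritems()` for `Series.items()`, since `iteritems` was removed in pandas 2.x (this image now ships pandas 2.1.4). A minimal standalone sketch of the replacement, using a made-up price series rather than LEAN history data:

    import pandas as pd

    # Series.iteritems() is gone in pandas 2.x; items() yields the same (index, value) pairs
    close = pd.Series([400.0, 401.5], index=pd.to_datetime(["2024-01-02", "2024-01-03"]))
    for time, price in close.items():
        print(time, price)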
diff --git a/Algorithm.Python/QuantConnect.Algorithm.Python.csproj b/Algorithm.Python/QuantConnect.Algorithm.Python.csproj
index 7aff8c778c32..a85124fe1c7e 100644
--- a/Algorithm.Python/QuantConnect.Algorithm.Python.csproj
+++ b/Algorithm.Python/QuantConnect.Algorithm.Python.csproj
@@ -39,7 +39,7 @@
-
+
diff --git a/Algorithm.Python/SetHoldingsRegressionAlgorithm.py b/Algorithm.Python/SetHoldingsRegressionAlgorithm.py
index 1504d128e7f2..a5bdd4f12d54 100644
--- a/Algorithm.Python/SetHoldingsRegressionAlgorithm.py
+++ b/Algorithm.Python/SetHoldingsRegressionAlgorithm.py
@@ -34,6 +34,6 @@ def OnData(self, data):
'''
if not self.Portfolio.Invested:
self.SetHoldings("SPY", 0.1)
- self.SetHoldings("SPY", np.float(0.20))
+ self.SetHoldings("SPY", float(0.20))
self.SetHoldings("SPY", np.float64(0.30))
self.SetHoldings("SPY", 1)
diff --git a/Algorithm/QuantConnect.Algorithm.csproj b/Algorithm/QuantConnect.Algorithm.csproj
index b2c0eb72d113..f4d57c620f85 100644
--- a/Algorithm/QuantConnect.Algorithm.csproj
+++ b/Algorithm/QuantConnect.Algorithm.csproj
@@ -29,7 +29,7 @@
LICENSE
-
+
diff --git a/AlgorithmFactory/Python/Wrappers/AlgorithmPythonWrapper.cs b/AlgorithmFactory/Python/Wrappers/AlgorithmPythonWrapper.cs
index 6d12891204b8..36ff9b2b4d47 100644
--- a/AlgorithmFactory/Python/Wrappers/AlgorithmPythonWrapper.cs
+++ b/AlgorithmFactory/Python/Wrappers/AlgorithmPythonWrapper.cs
@@ -782,7 +782,7 @@ public void OnEndOfDay()
// Only throws if there is an error in its implementation body
catch (PythonException exception)
{
- if (!exception.Message.StartsWith("OnEndOfDay()"))
+ if (!exception.Message.Contains("OnEndOfDay() missing 1 required positional argument"))
{
_baseAlgorithm.SetRunTimeError(exception);
}
@@ -810,7 +810,7 @@ public void OnEndOfDay(Symbol symbol)
// Only throws if there is an error in its implementation body
catch (PythonException exception)
{
- if (!exception.Message.StartsWith("OnEndOfDay()"))
+ if (!exception.Message.Contains("OnEndOfDay() takes 1 positional argument but 2 were given"))
{
_baseAlgorithm.SetRunTimeError(exception);
}
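On Python 3.10+ the `TypeError` raised for a signature mismatch is prefixed with the class name (e.g. `MyAlgorithm.OnEndOfDay() missing 1 required positional argument: 'symbol'`), so a `StartsWith("OnEndOfDay()")` check no longer matches and the wrapper now searches for the specific message text with `Contains`. A hedged, plain-Python illustration of the messages being matched (the class and call here are hypothetical):

    class MyAlgorithm:
        def OnEndOfDay(self, symbol):  # only the symbol overload is defined
            pass

    try:
        MyAlgorithm().OnEndOfDay()     # called without a symbol
    except TypeError as e:
        # on Python 3.11: "MyAlgorithm.OnEndOfDay() missing 1 required positional argument: 'symbol'"
        print(e)
    # defining only OnEndOfDay(self) and calling it with a symbol produces the
    # "...takes 1 positional argument but 2 were given" message matched in the second hunk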
diff --git a/AlgorithmFactory/QuantConnect.AlgorithmFactory.csproj b/AlgorithmFactory/QuantConnect.AlgorithmFactory.csproj
index 1d71dfc20eca..ed7f9b869cae 100644
--- a/AlgorithmFactory/QuantConnect.AlgorithmFactory.csproj
+++ b/AlgorithmFactory/QuantConnect.AlgorithmFactory.csproj
@@ -28,7 +28,7 @@
LICENSE
-
+
diff --git a/Common/QuantConnect.csproj b/Common/QuantConnect.csproj
index f1d8936a626f..0a428884df08 100644
--- a/Common/QuantConnect.csproj
+++ b/Common/QuantConnect.csproj
@@ -34,7 +34,7 @@
-
+
diff --git a/DockerfileLeanFoundation b/DockerfileLeanFoundation
index 244ccbee0c5a..f4892051f0ab 100644
--- a/DockerfileLeanFoundation
+++ b/DockerfileLeanFoundation
@@ -26,13 +26,13 @@ RUN apt-get update && apt-get install -y dotnet-sdk-6.0 && \
apt-get clean && apt-get autoclean && apt-get autoremove --purge -y && rm -rf /var/lib/apt/lists/*
# Set PythonDLL variable for PythonNet
-ENV PYTHONNET_PYDLL="/opt/miniconda3/lib/libpython3.8.so"
+ENV PYTHONNET_PYDLL="/opt/miniconda3/lib/libpython3.11.so"
# Install miniconda
-ENV CONDA="Miniconda3-py38_23.1.0-1-Linux-x86_64.sh"
+ENV CONDA="Miniconda3-py311_24.1.2-0-Linux-x86_64.sh"
ENV PATH="/opt/miniconda3/bin:${PATH}"
RUN wget -q https://cdn.quantconnect.com/miniconda/${CONDA} && \
- bash ${CONDA} -b -p /opt/miniconda3 && rm -rf ${CONDA}
+ bash ${CONDA} -b -p /opt/miniconda3 && rm -rf ${CONDA} && conda config --set solver classic
# Install java runtime for h2o lib
RUN wget https://download.oracle.com/java/17/latest/jdk-17_linux-x64_bin.deb \
@@ -45,221 +45,225 @@ ENV PIP_DEFAULT_TIMEOUT=120
# Install all packages
RUN pip install --no-cache-dir \
- cython==0.29.36 \
- pandas==1.5.3 \
- scipy==1.10.1 \
- numpy==1.23.5 \
- wrapt==1.14.1 \
- astropy==5.2.2 \
- beautifulsoup4==4.12.2 \
- dill==0.3.7 \
- jsonschema==4.19.1 \
- lxml==4.9.3 \
- msgpack==1.0.7 \
- numba==0.56.4 \
- xarray==2023.1.0 \
- plotly==5.17.0 \
- jupyterlab==3.4.4 \
- tensorflow==2.13.1 \
+ cython==3.0.9 \
+ pandas==2.1.4 \
+ scipy==1.11.4 \
+ numpy==1.26.4 \
+ wrapt==1.16.0 \
+ astropy==6.0.0 \
+ beautifulsoup4==4.12.3 \
+ dill==0.3.8 \
+ jsonschema==4.21.1 \
+ lxml==5.1.0 \
+ msgpack==1.0.8 \
+ numba==0.59.0 \
+ xarray==2024.2.0 \
+ plotly==5.20.0 \
+ jupyterlab==4.1.5 \
+ tensorflow==2.16.1 \
docutils==0.20.1 \
cvxopt==1.3.2 \
gensim==4.3.2 \
- keras==2.13.1 \
- lightgbm==4.1.0 \
- mpi4py==3.1.5 \
+ keras==3.0.5 \
+ lightgbm==4.3.0 \
nltk==3.8.1 \
graphviz==0.20.1 \
- cmdstanpy==1.2.0 \
- copulae==0.7.8 \
- featuretools==1.27.0 \
- PuLP==2.7.0 \
- pymc==5.6.1 \
+ cmdstanpy==1.2.1 \
+ copulae==0.7.9 \
+ featuretools==1.30.0 \
+ PuLP==2.8.0 \
+ pymc==5.10.4 \
rauth==0.7.3 \
- scikit-learn==1.3.2 \
- scikit-multiflow==0.5.3 \
- scikit-optimize==0.9.0 \
- aesara==2.9.2 \
- tsfresh==0.20.1 \
- tslearn==0.6.2 \
+ scikit-learn==1.4.1.post1 \
+ scikit-optimize==0.10.0 \
+ aesara==2.9.3 \
+ tsfresh==0.20.2 \
+ tslearn==0.6.3 \
tweepy==4.14.0 \
- PyWavelets==1.4.1 \
- umap-learn==0.5.3 \
- fastai==2.7.13 \
- arch==5.6.0 \
- copulas==0.9.2 \
+ PyWavelets==1.5.0 \
+ umap-learn==0.5.5 \
+ fastai==2.7.14 \
+ arch==6.3.0 \
+ copulas==0.10.1 \
creme==0.6.1 \
cufflinks==0.17.3 \
gym==0.26.2 \
- ipywidgets==8.1.1 \
+ ipywidgets==8.1.2 \
deap==1.4.1 \
- cvxpy==1.4.1 \
- pykalman==0.9.5 \
+ pykalman==0.9.7 \
+ cvxpy==1.4.2 \
pyportfolioopt==1.5.5 \
- pmdarima==2.0.3 \
- pyro-ppl==1.8.6 \
- riskparityportfolio==0.4 \
+ pmdarima==2.0.4 \
+ pyro-ppl==1.9.0 \
+ riskparityportfolio==0.5.1 \
sklearn-json==0.1.0 \
- statsmodels==0.13.5 \
- QuantLib==1.31.1 \
- xgboost==2.0.0 \
- dtw-python==1.3.0 \
- gluonts==0.13.7 \
+ statsmodels==0.14.1 \
+ QuantLib==1.33 \
+ xgboost==2.0.3 \
+ dtw-python==1.3.1 \
+ gluonts==0.14.4 \
gplearn==0.4.2 \
- jax==0.4.13 \
- jaxlib==0.4.13 \
+ jax==0.4.25 \
+ jaxlib==0.4.25 \
keras-rl==0.4.2 \
- pennylane==0.32.0 \
- PennyLane-Lightning==0.32.0 \
- pennylane-qiskit==0.32.0 \
- qiskit==0.44.2 \
- neural-tangents==0.6.2 \
+ pennylane==0.35.1 \
+ PennyLane-Lightning==0.35.1 \
+ pennylane-qiskit==0.35.1 \
+ qiskit==1.0.2 \
+ neural-tangents==0.6.5 \
mplfinance==0.12.10b0 \
- hmmlearn==0.3.0 \
- catboost==1.2.2 \
+ hmmlearn==0.3.2 \
+ catboost==1.2.3 \
fastai2==0.0.30 \
scikit-tda==1.0.0 \
- ta==0.10.2 \
- seaborn==0.13.0 \
- optuna==3.4.0 \
+ ta==0.11.0 \
+ seaborn==0.13.2 \
+ optuna==3.5.0 \
findiff==0.10.0 \
- sktime==0.24.0 \
+ sktime==0.27.1 \
hyperopt==0.2.7 \
bayesian-optimization==1.4.3 \
- pingouin==0.5.3 \
- quantecon==0.7.1 \
- matplotlib==3.7.3 \
+ pingouin==0.5.4 \
+ quantecon==0.7.2 \
+ matplotlib==3.7.5 \
sdeint==0.3.0 \
- pandas_market_calendars==4.3.1 \
- dgl==1.1.2 \
- ruptures==1.1.8 \
- simpy==4.0.2 \
+ pandas_market_calendars==4.4.0 \
+ dgl==2.1.0 \
+ ruptures==1.1.9 \
+ simpy==4.1.1 \
scikit-learn-extra==0.3.0 \
- ray==2.7.1 \
- "ray[tune]"==2.7.1 \
- "ray[rllib]"==2.7.1 \
+ ray==2.9.3 \
+ "ray[tune]"==2.9.3 \
+ "ray[rllib]"==2.9.3 \
fastText==0.9.2 \
- h2o==3.44.0.1 \
+ h2o==3.46.0.1 \
prophet==1.1.5 \
- torch==2.1.0 \
- torchvision==0.16.0 \
- ax-platform==0.3.3 \
+ torch==2.2.1 \
+ torchvision==0.17.1 \
+ ax-platform==0.3.7 \
alphalens-reloaded==0.4.3 \
pyfolio-reloaded==0.9.5 \
- altair==5.1.2 \
- stellargraph==1.2.1 \
- modin==0.22.3 \
- persim==0.3.1 \
- ripser==0.6.4 \
- pydmd==0.4.1.post2308 \
- EMD-signal==1.5.2 \
- spacy==3.7.2 \
+ altair==5.2.0 \
+ modin==0.26.1 \
+ persim==0.3.5 \
+ ripser==0.6.8 \
+ pydmd==1.0.0 \
+ spacy==3.7.4 \
pandas-ta==0.3.14b \
- pytorch-ignite==0.4.12 \
+ pytorch-ignite==0.4.13 \
tensorly==0.8.1 \
- mlxtend==0.23.0 \
- shap==0.43.0 \
+ mlxtend==0.23.1 \
+ shap==0.45.0 \
lime==0.2.0.1 \
- tensorflow-probability==0.21.0 \
+ tensorflow-probability==0.24.0 \
mpmath==1.3.0 \
tensortrade==1.0.3 \
- polars==0.19.11 \
- stockstats==0.5.4 \
- autokeras==1.1.0 \
+ polars==0.20.15 \
+ stockstats==0.6.2 \
+ autokeras==2.0.0 \
QuantStats==0.0.62 \
hurst==0.0.5 \
- numerapi==2.16.1 \
+ numerapi==2.18.0 \
pymdptoolbox==4.0-b3 \
- fuzzy-c-means==1.6.3 \
- panel==1.2.3 \
- hvplot==0.9.0 \
- line-profiler==4.1.1 \
+ panel==1.3.8 \
+ hvplot==0.9.2 \
+ line-profiler==4.1.2 \
py-heat==0.0.6 \
py-heat-magic==0.0.2 \
- bokeh==3.1.1 \
- tensorflow-decision-forests==1.5.0 \
- river==0.14.0 \
+ bokeh==3.3.4 \
+ tensorflow-decision-forests==1.9.0 \
+ river==0.21.0 \
stumpy==1.12.0 \
- pyvinecopulib==0.6.3 \
+ pyvinecopulib==0.6.4 \
ijson==3.2.3 \
- jupyter-resource-usage==0.7.2 \
+ jupyter-resource-usage==1.0.2 \
injector==0.21.0 \
openpyxl==3.1.2 \
xlrd==2.0.1 \
- mljar-supervised==1.0.2 \
+ mljar-supervised==1.1.6 \
dm-tree==0.1.8 \
- lz4==4.3.2 \
- ortools==9.7.2996 \
+ lz4==4.3.3 \
+ ortools==9.9.3963 \
py_vollib==1.0.1 \
- tensorflow-addons==0.21.0 \
thundergbm==0.3.17 \
yellowbrick==1.5 \
livelossplot==0.5.5 \
gymnasium==0.28.1 \
- interpret==0.4.4 \
- DoubleML==0.7.0 \
- jupyter-bokeh==3.0.7 \
- imbalanced-learn==0.11.0 \
- scikeras==0.12.0 \
- openai==1.3.5 \
- lazypredict==0.2.12 \
- fracdiff==0.9.0 \
- darts==0.24.0 \
- fastparquet==2023.8.0 \
- tables==3.8.0 \
- dimod==0.12.3 \
- dwave-samplers==1.0.0 \
+ interpret==0.5.1 \
+ DoubleML==0.7.1 \
+ jupyter-bokeh==4.0.0 \
+ imbalanced-learn==0.12.0 \
+ openai==1.14.3 \
+ lazypredict-nightly==0.3.0 \
+ darts==0.28.0 \
+ fastparquet==2024.2.0 \
+ tables==3.9.2 \
+ dimod==0.12.14 \
+ dwave-samplers==1.2.0 \
python-statemachine==2.1.2 \
pymannkendall==1.4.3 \
- Pyomo==6.6.2 \
- gpflow==2.9.0 \
- pyarrow==13.0.0 \
- dwave-ocean-sdk==6.1.1 \
+ Pyomo==6.7.1 \
+ gpflow==2.9.1 \
+ pyarrow==15.0.1 \
+ dwave-ocean-sdk==6.9.0 \
chardet==5.2.0 \
- stable-baselines3==2.1.0 \
+ stable-baselines3==2.2.1 \
Shimmy==1.3.0 \
- pystan==3.7.0 \
+ pystan==3.9.0 \
FixedEffectModel==0.0.5 \
- tick==0.7.0.1 \
- transformers==4.34.0 \
- Rbeast==0.1.16 \
- langchain==0.0.341 \
- tensorflow-ranking==0.5.3 \
- pomegranate==1.0.3 \
- tigramite==5.2.3.1 \
- MAPIE==0.7.0 \
- mlforecast==0.9.3 \
- functime==0.8.4 \
+ transformers==4.38.2 \
+ Rbeast==0.1.19 \
+ langchain==0.1.12 \
+ pomegranate==1.0.4 \
+ MAPIE==0.8.3 \
+ mlforecast==0.12.0 \
+ functime==0.9.5 \
tensorrt==8.6.1.post1 \
- x-transformers==1.26.0 \
- Werkzeug==2.3.8
+ x-transformers==1.27.19 \
+ Werkzeug==3.0.1 \
+ TPOT==0.12.2 \
+ llama-index==0.10.19 \
+ mlflow==2.11.1 \
+ ngboost==0.5.1 \
+ pycaret==3.3.0 \
+ control==0.9.4 \
+ pgmpy==0.1.25 \
+ mgarch==0.3.0 \
+ jupyter-ai==2.12.0 \
+ keras-tcn==3.5.0 \
+ neuralprophet[live]==0.8.0 \
+ Riskfolio-Lib==6.0.0 \
+ fuzzy-c-means==1.7.2 \
+ EMD-signal==1.6.0 \
+ dask[complete]==2024.3.1
RUN conda install -c conda-forge -y cudatoolkit=11.8.0 && conda install -c nvidia -y cuda-compiler=12.2.2 && conda clean -y --all
ENV XLA_FLAGS=--xla_gpu_cuda_data_dir=/opt/miniconda3/
ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/miniconda3/pkgs/cudatoolkit-11.8.0-h6a678d5_0/lib/:/opt/miniconda3/lib/python3.8/site-packages/nvidia/cudnn/lib/:/opt/miniconda3/lib/python3.8/site-packages/tensorrt_libs/
ENV CUDA_MODULE_LOADING=LAZY
+# mamba-ssm & causal requires nvidia capabilities to be installed. iisignature requires numpy to be already installed
+RUN pip install --no-cache-dir mamba-ssm==1.2.0.post1 causal-conv1d==1.2.0.post2 iisignature==0.24
+
# Install dwave tool
RUN dwave install --all -y
# Install 'ipopt' solver for 'Pyomo'
-RUN conda install -c conda-forge ipopt==3.14.13 \
+RUN conda install -c conda-forge ipopt==3.14.14 \
&& conda clean -y --all
-# We install need to install separately else fails to find numpy
-RUN pip install --no-cache-dir Riskfolio-Lib==4.4.2 iisignature==0.24
-
# Install spacy models
RUN python -m spacy download en_core_web_md && python -m spacy download en_core_web_sm
RUN conda install -y -c conda-forge \
- openmpi=4.1.6 \
+ openmpi=5.0.2 \
&& conda clean -y --all
# Install PyTorch Geometric
RUN TORCH=$(python -c "import torch; print(torch.__version__)") && \
CUDA=$(python -c "import torch; print('cu' + torch.version.cuda.replace('.', ''))") && \
pip install --no-cache-dir -f https://pytorch-geometric.com/whl/torch-${TORCH}+${CUDA}.html \
- torch-scatter==2.1.2 torch-sparse==0.6.18 torch-cluster==1.6.3 torch-spline-conv==1.2.2 torch-geometric==2.4.0
+ torch-scatter==2.1.2 torch-sparse==0.6.18 torch-cluster==1.6.3 torch-spline-conv==1.2.2 torch-geometric==2.5.1
# Install nltk data
RUN python -m nltk.downloader -d /usr/share/nltk_data punkt && \
@@ -267,16 +271,6 @@ RUN python -m nltk.downloader -d /usr/share/nltk_data punkt && \
python -m nltk.downloader -d /usr/share/nltk_data stopwords && \
python -m nltk.downloader -d /usr/share/nltk_data wordnet
-# Install ppscore
-RUN wget -q https://cdn.quantconnect.com/ppscore/ppscore-master-ce93fa3.zip && \
- unzip -q ppscore-master-ce93fa3.zip && cd ppscore-master && \
- pip install . && cd .. && rm -rf ppscore-master && rm ppscore-master-ce93fa3.zip
-
-# Install DX Analytics
-RUN wget -q https://cdn.quantconnect.com/dx/dx-master-69922c0.zip && \
- unzip -q dx-master-69922c0.zip && cd dx-master && \
- pip install . && cd .. && rm -rf dx-master && rm dx-master-69922c0.zip
-
# Install Pyrb
RUN wget -q https://cdn.quantconnect.com/pyrb/pyrb-master-250054e.zip && \
unzip -q pyrb-master-250054e.zip && cd pyrb-master && \
@@ -291,9 +285,8 @@ RUN wget -q https://cdn.quantconnect.com/ssm/ssm-master-646e188.zip && \
RUN wget -q https://cdn.quantconnect.com/ta-lib/ta-lib-0.4.0-src.tar.gz && \
tar -zxvf ta-lib-0.4.0-src.tar.gz && cd ta-lib && \
./configure --prefix=/usr && make && make install && \
- wget -q https://cdn.quantconnect.com/ta-lib/TA_Lib-0.4.18.zip && \
- unzip -q TA_Lib-0.4.18.zip && cd ta-lib-TA_Lib-0.4.18 && \
- python setup.py install && cd ../.. && rm -rf ta-lib && rm ta-lib-0.4.0-src.tar.gz
+ cd .. && rm -rf ta-lib && rm ta-lib-0.4.0-src.tar.gz && \
+ pip install --no-cache-dir TA-Lib==0.4.28
RUN echo "{\"argv\":[\"python\",\"-m\",\"ipykernel_launcher\",\"-f\",\"{connection_file}\"],\"display_name\":\"Foundation-Py-Default\",\"language\":\"python\",\"metadata\":{\"debugger\":true}}" > /opt/miniconda3/share/jupyter/kernels/python3/kernel.json
diff --git a/DockerfileLeanFoundationARM b/DockerfileLeanFoundationARM
index ebb437f73eef..fa4fbd6cb770 100644
--- a/DockerfileLeanFoundationARM
+++ b/DockerfileLeanFoundationARM
@@ -12,7 +12,7 @@ CMD ["/sbin/my_init"]
# Install OS Packages:
# Misc tools for running Python.NET and IB inside a headless container.
RUN add-apt-repository ppa:ubuntu-toolchain-r/test && apt-get update \
- && apt-get install -y git libgtk2.0.0 cmake bzip2 curl unzip wget python3-pip python3-opengl zlib1g-dev \
+ && apt-get install -y git libgtk2.0.0 bzip2 curl unzip wget python3-pip python3-opengl zlib1g-dev \
xvfb libxrender1 libxtst6 libxi6 libglib2.0-dev libopenmpi-dev libstdc++6 openmpi-bin \
r-base pandoc libcurl4-openssl-dev \
openjdk-11-jdk openjdk-11-jre bbe \
@@ -29,10 +29,10 @@ RUN wget https://dot.net/v1/dotnet-install.sh && \
ENV DOTNET_ROOT="/root/.dotnet"
# Set PythonDLL variable for PythonNet
-ENV PYTHONNET_PYDLL="/opt/miniconda3/lib/libpython3.8.so"
+ENV PYTHONNET_PYDLL="/opt/miniconda3/lib/libpython3.11.so"
# Install miniconda
-ENV CONDA="Miniconda3-py38_23.1.0-1-Linux-aarch64.sh"
+ENV CONDA="Miniconda3-py311_24.1.2-0-Linux-aarch64.sh"
ENV PATH="/opt/miniconda3/bin:${PATH}"
RUN wget -q https://cdn.quantconnect.com/miniconda/${CONDA} && \
bash ${CONDA} -b -p /opt/miniconda3 && rm -rf ${CONDA}
@@ -47,175 +47,171 @@ RUN apt-get update && apt-get install -y alien dpkg-dev debhelper build-essentia
ENV PIP_DEFAULT_TIMEOUT=120
# Install numpy first to avoid it not being resolved when installing libraries that depend on it next
-RUN pip install --no-cache-dir numpy==1.23.5
+RUN pip install --no-cache-dir numpy==1.26.4
+
+# Install a newer cmake than the one Ubuntu provides, required by the scikit-build process
+RUN conda install -c conda-forge cmake==3.28.4 && conda clean -y --all
# The list of packages in this image is shorter than the list in the AMD images
# This list only includes packages that can be installed within 2 minutes on ARM
RUN pip install --no-cache-dir \
- cython==0.29.36 \
- pandas==1.5.3 \
- scipy==1.10.1 \
- numpy==1.23.5 \
- wrapt==1.14.1 \
- astropy==5.2.2 \
- beautifulsoup4==4.12.2 \
- dill==0.3.7 \
- jsonschema==4.19.1 \
- lxml==4.9.3 \
- msgpack==1.0.7 \
- numba==0.56.4 \
- xarray==2023.1.0 \
- plotly==5.17.0 \
- jupyterlab==3.4.4 \
- tensorflow==2.13.1 \
+ cython==3.0.9 \
+ pandas==2.1.4 \
+ scipy==1.11.4 \
+ numpy==1.26.4 \
+ wrapt==1.16.0 \
+ astropy==6.0.0 \
+ beautifulsoup4==4.12.3 \
+ dill==0.3.8 \
+ jsonschema==4.21.1 \
+ lxml==5.1.0 \
+ msgpack==1.0.8 \
+ numba==0.59.0 \
+ xarray==2024.2.0 \
+ plotly==5.20.0 \
+ jupyterlab==4.1.5 \
+ tensorflow==2.16.1 \
docutils==0.20.1 \
gensim==4.3.2 \
- keras==2.13.1 \
- lightgbm==4.1.0 \
- mpi4py==3.1.5 \
+ keras==3.0.5 \
+ lightgbm==4.3.0 \
nltk==3.8.1 \
graphviz==0.20.1 \
- cmdstanpy==1.2.0 \
- copulae==0.7.8 \
- featuretools==1.27.0 \
- PuLP==2.7.0 \
- pymc==5.6.1 \
+ cmdstanpy==1.2.1 \
+ copulae==0.7.9 \
+ featuretools==1.30.0 \
+ PuLP==2.8.0 \
+ pymc==5.10.4 \
rauth==0.7.3 \
- scikit-learn==1.3.2 \
- scikit-multiflow==0.5.3 \
- scikit-optimize==0.9.0 \
- aesara==2.9.2 \
- tsfresh==0.20.1 \
- tslearn==0.6.2 \
+ scikit-learn==1.4.1.post1 \
+ scikit-optimize==0.10.0 \
+ aesara==2.9.3 \
+ tsfresh==0.20.2 \
+ tslearn==0.6.3 \
tweepy==4.14.0 \
- PyWavelets==1.4.1 \
- umap-learn==0.5.3 \
- fastai==2.7.13 \
- arch==5.6.0 \
- copulas==0.9.2 \
+ PyWavelets==1.5.0 \
+ umap-learn==0.5.5 \
+ fastai==2.7.14 \
+ arch==6.3.0 \
+ copulas==0.10.1 \
cufflinks==0.17.3 \
gym==0.26.2 \
- ipywidgets==8.1.1 \
+ ipywidgets==8.1.2 \
deap==1.4.1 \
- cvxpy==1.4.1 \
- pykalman==0.9.5 \
- pyro-ppl==1.8.6 \
+ pykalman==0.9.7 \
+ cvxpy==1.4.2 \
+ pyro-ppl==1.9.0 \
sklearn-json==0.1.0 \
- dtw-python==1.3.0 \
- gluonts==0.13.7 \
+ dtw-python==1.3.1 \
+ gluonts==0.14.4 \
gplearn==0.4.2 \
- jax==0.4.12 \
- pennylane==0.32.0 \
- PennyLane-Lightning==0.32.0 \
- pennylane-qiskit==0.32.0 \
+ jax==0.4.25 \
+ pennylane==0.35.1 \
+ PennyLane-Lightning==0.35.1 \
+ pennylane-qiskit==0.35.1 \
mplfinance==0.12.10b0 \
- hmmlearn==0.3.0 \
- ta==0.10.2 \
- seaborn==0.13.0 \
- optuna==3.4.0 \
+ hmmlearn==0.3.2 \
+ ta==0.11.0 \
+ seaborn==0.13.2 \
+ optuna==3.5.0 \
findiff==0.10.0 \
- sktime==0.24.0 \
+ sktime==0.27.1 \
hyperopt==0.2.7 \
bayesian-optimization==1.4.3 \
- matplotlib==3.7.3 \
+ matplotlib==3.7.5 \
sdeint==0.3.0 \
- pandas_market_calendars==4.3.1 \
- ruptures==1.1.8 \
- simpy==4.0.2 \
+ pandas_market_calendars==4.4.0 \
+ ruptures==1.1.9 \
+ simpy==4.1.1 \
scikit-learn-extra==0.3.0 \
- ray==2.7.1 \
- "ray[tune]"==2.7.1 \
- "ray[rllib]"==2.7.1 \
+ ray==2.9.3 \
+ "ray[tune]"==2.9.3 \
+ "ray[rllib]"==2.9.3 \
fastText==0.9.2 \
- h2o==3.44.0.1 \
+ h2o==3.46.0.1 \
prophet==1.1.5 \
- Riskfolio-Lib==4.0.3 \
- torch==2.1.0 \
- torchvision==0.16.0 \
- ax-platform==0.3.3 \
+ Riskfolio-Lib==6.0.0 \
+ torch==2.2.1 \
+ torchvision==0.17.1 \
+ ax-platform==0.3.7 \
alphalens-reloaded==0.4.3 \
pyfolio-reloaded==0.9.5 \
- altair==5.1.2 \
- stellargraph==1.2.1 \
- modin==0.22.3 \
- persim==0.3.1 \
- ripser==0.6.4 \
- pydmd==0.4.1.post2308 \
- EMD-signal==1.5.2 \
- spacy==3.7.2 \
+ altair==5.2.0 \
+ modin==0.26.1 \
+ persim==0.3.5 \
+ ripser==0.6.8 \
+ pydmd==1.0.0 \
+ EMD-signal==1.6.0 \
+ spacy==3.7.4 \
pandas-ta==0.3.14b \
- pytorch-ignite==0.4.12 \
+ pytorch-ignite==0.4.13 \
tensorly==0.8.1 \
- mlxtend==0.23.0 \
- shap==0.43.0 \
+ mlxtend==0.23.1 \
+ shap==0.45.0 \
lime==0.2.0.1 \
mpmath==1.3.0 \
- polars==0.19.11 \
- stockstats==0.5.4 \
+ polars==0.20.15 \
+ stockstats==0.6.2 \
QuantStats==0.0.62 \
hurst==0.0.5 \
- numerapi==2.16.1 \
+ numerapi==2.18.0 \
pymdptoolbox==4.0-b3 \
- panel==1.2.3 \
- hvplot==0.9.0 \
+ panel==1.3.8 \
+ hvplot==0.9.2 \
py-heat==0.0.6 \
py-heat-magic==0.0.2 \
- bokeh==3.1.1 \
- river==0.14.0 \
+ bokeh==3.3.4 \
+ river==0.21.0 \
stumpy==1.12.0 \
- pyvinecopulib==0.6.3 \
+ pyvinecopulib==0.6.4 \
ijson==3.2.3 \
- jupyter-resource-usage==0.7.2 \
+ jupyter-resource-usage==1.0.2 \
injector==0.21.0 \
openpyxl==3.1.2 \
xlrd==2.0.1 \
- mljar-supervised==1.0.2 \
+ mljar-supervised==1.1.6 \
dm-tree==0.1.8 \
- lz4==4.3.2 \
- ortools==9.6.2534 \
+ lz4==4.3.3 \
+ ortools==9.9.3963 \
py_vollib==1.0.1 \
thundergbm==0.3.17 \
yellowbrick==1.5 \
livelossplot==0.5.5 \
gymnasium==0.28.1 \
- interpret==0.4.4 \
- DoubleML==0.7.0 \
- jupyter-bokeh==3.0.7 \
- imbalanced-learn==0.11.0 \
- scikeras==0.12.0 \
- openai==1.3.5 \
- lazypredict==0.2.12 \
- fracdiff==0.9.0 \
- darts==0.24.0 \
- fastparquet==2023.8.0 \
- tables==3.8.0 \
- dimod==0.12.3 \
- dwave-samplers==1.0.0 \
+ interpret==0.5.1 \
+ DoubleML==0.7.1 \
+ jupyter-bokeh==4.0.0 \
+ imbalanced-learn==0.12.0 \
+ openai==1.14.3 \
+ lazypredict-nightly==0.3.0 \
+ darts==0.28.0 \
+ fastparquet==2024.2.0 \
+ tables==3.9.2 \
+ dimod==0.12.14 \
+ dwave-samplers==1.2.0 \
python-statemachine==2.1.2 \
pymannkendall==1.4.3 \
- Pyomo==6.6.2 \
- gpflow==2.9.0 \
- pyarrow==13.0.0 \
- dwave-ocean-sdk==6.1.1 \
+ Pyomo==6.7.1 \
+ gpflow==2.9.1 \
+ pyarrow==15.0.1 \
+ dwave-ocean-sdk==6.9.0 \
chardet==5.2.0 \
- stable-baselines3==2.1.0 \
+ stable-baselines3==2.2.1 \
Shimmy==1.3.0 \
FixedEffectModel==0.0.5 \
- transformers==4.34.0 \
- langchain==0.0.341 \
- tensorflow-ranking==0.5.3 \
- pomegranate==1.0.3 \
- tigramite==5.2.3.1 \
- MAPIE==0.7.0 \
- mlforecast==0.9.3 \
- x-transformers==1.26.0 \
- Werkzeug==2.3.8
+ transformers==4.38.2 \
+ langchain==0.1.12 \
+ pomegranate==1.0.4 \
+ MAPIE==0.8.3 \
+ mlforecast==0.12.0 \
+ x-transformers==1.27.19 \
+ Werkzeug==3.0.1
# Install dwave tool
RUN dwave install --all -y
# Install 'ipopt' solver for 'Pyomo'
-RUN conda install -c conda-forge ipopt==3.14.13 \
+RUN conda install -c conda-forge ipopt==3.14.14 \
&& conda clean -y --all
# We need to install this separately, else it fails to find numpy
@@ -224,8 +220,8 @@ RUN pip install --no-cache-dir iisignature==0.24
# Install spacy models
RUN python -m spacy download en_core_web_md && python -m spacy download en_core_web_sm
-RUN conda install -y -c conda-forge \
- openmpi=4.1.6 \
+RUN conda config --set solver classic && conda install -y -c conda-forge \
+ openmpi=5.0.2 \
&& conda clean -y --all
# Install nltk data
@@ -234,16 +230,6 @@ RUN python -m nltk.downloader -d /usr/share/nltk_data punkt && \
python -m nltk.downloader -d /usr/share/nltk_data stopwords && \
python -m nltk.downloader -d /usr/share/nltk_data wordnet
-# Install ppscore
-RUN wget -q https://cdn.quantconnect.com/ppscore/ppscore-master-ce93fa3.zip && \
- unzip -q ppscore-master-ce93fa3.zip && cd ppscore-master && \
- pip install . && cd .. && rm -rf ppscore-master && rm ppscore-master-ce93fa3.zip
-
-# Install DX Analytics
-RUN wget -q https://cdn.quantconnect.com/dx/dx-master-69922c0.zip && \
- unzip -q dx-master-69922c0.zip && cd dx-master && \
- pip install . && cd .. && rm -rf dx-master && rm dx-master-69922c0.zip
-
# Install Pyrb
RUN wget -q https://cdn.quantconnect.com/pyrb/pyrb-master-250054e.zip && \
unzip -q pyrb-master-250054e.zip && cd pyrb-master && \
@@ -254,17 +240,6 @@ RUN wget -q https://cdn.quantconnect.com/ssm/ssm-master-646e188.zip && \
unzip -q ssm-master-646e188.zip && cd ssm-master && \
pip install . && cd .. && rm -rf ssm-master && rm ssm-master-646e188.zip
-# Due to conflicts install 'pomegranate' virtual environment package
-RUN python -m venv /Foundation-Pomegranate --system-site-packages && . /Foundation-Pomegranate/bin/activate \
- && pip install --no-cache-dir \
- pomegranate==0.14.8 \
- mxnet==1.9.1 \
- nbeats-keras==1.8.0 \
- nbeats-pytorch==1.8.0 \
- neuralprophet[live]==0.6.2 \
- && python -m ipykernel install --name=Foundation-Pomegranate \
- && deactivate
-
RUN echo "{\"argv\":[\"python\",\"-m\",\"ipykernel_launcher\",\"-f\",\"{connection_file}\"],\"display_name\":\"Foundation-Py-Default\",\"language\":\"python\",\"metadata\":{\"debugger\":true}}" > /opt/miniconda3/share/jupyter/kernels/python3/kernel.json
# Install wkhtmltopdf and xvfb to support HTML to PDF conversion of reports
diff --git a/Engine/QuantConnect.Lean.Engine.csproj b/Engine/QuantConnect.Lean.Engine.csproj
index 48cba3c79f08..45871b80bcdd 100644
--- a/Engine/QuantConnect.Lean.Engine.csproj
+++ b/Engine/QuantConnect.Lean.Engine.csproj
@@ -42,7 +42,7 @@
-
+
diff --git a/Indicators/QuantConnect.Indicators.csproj b/Indicators/QuantConnect.Indicators.csproj
index e6ef9ae9ec93..8aceac03f435 100644
--- a/Indicators/QuantConnect.Indicators.csproj
+++ b/Indicators/QuantConnect.Indicators.csproj
@@ -31,7 +31,7 @@
-
+
diff --git a/Report/QuantConnect.Report.csproj b/Report/QuantConnect.Report.csproj
index 877463b21b5b..0dd77fa7b918 100644
--- a/Report/QuantConnect.Report.csproj
+++ b/Report/QuantConnect.Report.csproj
@@ -41,7 +41,7 @@
LICENSE
-
+
diff --git a/Research/QuantConnect.Research.csproj b/Research/QuantConnect.Research.csproj
index 800d71bda52a..d4e375064bc5 100644
--- a/Research/QuantConnect.Research.csproj
+++ b/Research/QuantConnect.Research.csproj
@@ -33,7 +33,7 @@
-
+
diff --git a/Tests/Python/AlgorithmPythonWrapperTests.cs b/Tests/Python/AlgorithmPythonWrapperTests.cs
index a4b99d6db795..f2b406c9482c 100644
--- a/Tests/Python/AlgorithmPythonWrapperTests.cs
+++ b/Tests/Python/AlgorithmPythonWrapperTests.cs
@@ -35,7 +35,7 @@ public void Setup()
_baseCode = File.ReadAllText(Path.Combine("./RegressionAlgorithms", "Test_AlgorithmPythonWrapper.py"));
}
- [Test]
+ [TestCase("")]
[TestCase("def OnEndOfDay(self): pass")]
[TestCase("def OnEndOfDay(self, symbol): pass")]
public void CallOnEndOfDayDoesNotThrow(string code)
diff --git a/Tests/Python/PandasConverterTests.BackwardsCompatibility.cs b/Tests/Python/PandasConverterTests.BackwardsCompatibility.cs
index db274b27501b..448774b40a44 100644
--- a/Tests/Python/PandasConverterTests.BackwardsCompatibility.cs
+++ b/Tests/Python/PandasConverterTests.BackwardsCompatibility.cs
@@ -31,10 +31,15 @@ public partial class PandasConverterTests
[Test, TestCaseSource(nameof(TestDataFrameNonExceptionFunctions))]
public void BackwardsCompatibilityDataFrameDataFrameNonExceptionFunctions(string method, string index, bool cache)
{
- if(method == ".to_orc()" && OS.IsWindows)
+ if(method == ".to_orc()")
{
- // not supported in windows
- return;
+ if (OS.IsWindows)
+ {
+ // not supported in windows
+ return;
+ }
+ // orc does not support serializing a non-default index for the index; you can .reset_index() to make the index into column(s)
+ method = $".reset_index(){method}";
}
if (cache) SymbolCache.Set("SPY", Symbols.SPY);
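The ORC writer (via pyarrow) refuses to serialize a frame whose index is not the default RangeIndex, which is the error quoted in the new comment, so the test now calls `.reset_index()` before `.to_orc()` on non-Windows platforms. A standalone sketch of the same workaround with a hypothetical two-row frame (assumes pyarrow is installed):

    import pandas as pd

    df = pd.DataFrame(
        {"lastprice": [400.0, 401.5]},
        index=pd.MultiIndex.from_tuples([("SPY", 1), ("SPY", 2)], names=["symbol", "time"]),
    )
    # df.to_orc(...) would raise because of the non-default index;
    # flattening the index into columns first makes it serializable
    df.reset_index().to_orc("/tmp/frame.orc")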
diff --git a/Tests/Python/PandasConverterTests.cs b/Tests/Python/PandasConverterTests.cs
index 1ffbc4066c10..fb3c25bf6ff0 100644
--- a/Tests/Python/PandasConverterTests.cs
+++ b/Tests/Python/PandasConverterTests.cs
@@ -1081,7 +1081,7 @@ public void BackwardsCompatibilityDataFrame_groupby(string index, bool cache = f
import pandas as pd
def Test(df, other, symbol):
df = pd.concat([df, other])
- df = df.groupby(level=0).mean()
+ df = df.groupby(level=0).mean(numeric_only=True)
data = df.lastprice.loc[{index}]
if data is 0:
raise Exception('Data is zero')").GetAttr("Test");
@@ -1153,9 +1153,6 @@ def Test(dataFrame, symbol):
[TestCase("items", "'SPY'", true)]
[TestCase("items", "symbol")]
[TestCase("items", "str(symbol.ID)")]
- [TestCase("iteritems", "'SPY'", true)]
- [TestCase("iteritems", "symbol")]
- [TestCase("iteritems", "str(symbol.ID)")]
public void BackwardsCompatibilityDataFrame_items(string method, string index, bool cache = false)
{
if (cache) SymbolCache.Set("SPY", Symbols.SPY);
@@ -1457,7 +1454,7 @@ public void BackwardsCompatibilityDataFrame_pivot_table(string index, bool cache
import pandas as pd
def Test(dataFrame, symbol):
df = dataFrame.reset_index()
- table = pd.pivot_table(df, index=['symbol', 'time'])
+ table = pd.pivot_table(df, index=['symbol', 'time'], aggfunc='first')
data = table.lastprice.unstack(0)
data = data[{index}]
if data is 0:
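`pd.pivot_table` defaults to `aggfunc='mean'`, which in pandas 2.x fails once non-numeric columns reach the aggregation; `aggfunc='first'` works for any dtype and keeps the test's intent of simply reshaping the frame. A sketch with a hypothetical frame:

    import pandas as pd

    df = pd.DataFrame({"symbol": ["SPY", "SPY"], "time": [1, 2],
                       "lastprice": [400.0, 401.5], "exchange": ["N", "N"]})
    # the default aggfunc='mean' would choke on the non-numeric 'exchange' column in pandas 2.x
    table = pd.pivot_table(df, index=["symbol", "time"], aggfunc="first")
    print(table.lastprice.unstack(0))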
@@ -1526,7 +1523,7 @@ public void BackwardsCompatibilityDataFrame_pipe(string index, bool cache = fals
import pandas as pd
def Test(dataFrame, other, symbol):
def mean_by_group(dataframe, level):
- return dataframe.groupby(level=level).mean()
+ return dataframe.groupby(level=level).mean(numeric_only=True)
df = pd.concat([dataFrame, other])
data = df.pipe(mean_by_group, level=0)
@@ -1723,7 +1720,7 @@ public void BackwardsCompatibilityDataFrame_rolling(string index, bool cache = f
dynamic test = PyModule.FromString("testModule",
$@"
def Test(dataFrame, symbol):
- data = dataFrame.rolling(2).sum()
+ data = dataFrame.rolling(2).sum(numeric_only=True)
data = data.lastprice.unstack(0)
data = data[{index}]
if data is 0:
@@ -1785,7 +1782,7 @@ def Test(dataFrame, symbol):
[TestCase("'SPY'", true)]
[TestCase("symbol")]
[TestCase("str(symbol.ID)")]
- public void BackwardsCompatibilityDataFrame_slice_shift(string index, bool cache = false)
+ public void BackwardsCompatibilityDataFrame_shift(string index, bool cache = false)
{
if (cache) SymbolCache.Set("SPY", Symbols.SPY);
@@ -1794,7 +1791,7 @@ public void BackwardsCompatibilityDataFrame_slice_shift(string index, bool cache
dynamic test = PyModule.FromString("testModule",
$@"
def Test(dataFrame, symbol):
- data = dataFrame.slice_shift().lastprice.unstack(0)
+ data = dataFrame.shift().lastprice.unstack(0)
data = data[{index}]
if data is 0:
raise Exception('Data is zero')").GetAttr("Test");
@@ -1954,7 +1951,7 @@ def Test(dataFrame, symbol):
[TestCase("'SPY'", true)]
[TestCase("symbol")]
[TestCase("str(symbol.ID)")]
- public void BackwardsCompatibilityDataFrame_tshift(string index, bool cache = false)
+ public void BackwardsCompatibilityDataFrame_series_shift(string index, bool cache = false)
{
if (cache) SymbolCache.Set("SPY", Symbols.SPY);
@@ -1965,7 +1962,7 @@ public void BackwardsCompatibilityDataFrame_tshift(string index, bool cache = fa
from datetime import timedelta as d
def Test(dataFrame, symbol):
series = dataFrame.droplevel(0)
- data = series.tshift(freq=d(1))").GetAttr("Test");
+ data = series.shift(freq=d(1))").GetAttr("Test");
Assert.DoesNotThrow(() => test(GetTestDataFrame(Symbols.SPY), Symbols.SPY));
}
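`slice_shift` and `tshift` were removed in pandas 2.x, so the two renamed tests above now exercise `shift()`, which covers both cases: a plain `shift()` replaces `slice_shift()`, and `shift(freq=...)` replaces `tshift(freq=...)` by shifting the index instead of the values. A standalone sketch with a made-up series:

    import pandas as pd

    s = pd.Series([1.0, 2.0, 3.0], index=pd.date_range("2024-01-01", periods=3))
    print(s.shift())                           # replaces the removed slice_shift()
    print(s.shift(freq=pd.Timedelta(days=1)))  # replaces the removed tshift(): shifts the index, not the values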
@@ -2668,26 +2665,6 @@ def mean_by_group(dataframe, level):
}
}
- [TestCase("'SPY'", true)]
- [TestCase("symbol")]
- [TestCase("str(symbol.ID)")]
- public void BackwardsCompatibilitySeries_pop(string index, bool cache = false)
- {
- if (cache) SymbolCache.Set("SPY", Symbols.SPY);
-
- using (Py.GIL())
- {
- dynamic test = PyModule.FromString("testModule",
- $@"
-def Test(dataFrame, symbol):
- data = dataFrame.lastprice.pop({index})
- if data is 0:
- raise Exception('Data is zero')").GetAttr("Test");
-
- Assert.DoesNotThrow(() => test(GetTestDataFrame(Symbols.SPY), Symbols.SPY));
- }
- }
-
[TestCase("'SPY'", true)]
[TestCase("symbol")]
[TestCase("str(symbol.ID)")]
@@ -2873,7 +2850,7 @@ def Test(dataFrame, symbol):
[TestCase("'SPY'", true)]
[TestCase("symbol")]
[TestCase("str(symbol.ID)")]
- public void BackwardsCompatibilitySeries_slice_shift(string index, bool cache = false)
+ public void BackwardsCompatibilitySeries_shift(string index, bool cache = false)
{
if (cache) SymbolCache.Set("SPY", Symbols.SPY);
@@ -2883,7 +2860,7 @@ public void BackwardsCompatibilitySeries_slice_shift(string index, bool cache =
$@"
def Test(dataFrame, symbol):
series = dataFrame.lastprice
- data = series.slice_shift()
+ data = series.shift()
data = data.loc[{index}]
if data is 0:
raise Exception('Data is zero')").GetAttr("Test");
@@ -3026,26 +3003,6 @@ def Test(dataFrame, symbol):
}
}
- [TestCase("'SPY'", true)]
- [TestCase("symbol")]
- [TestCase("str(symbol.ID)")]
- public void BackwardsCompatibilitySeries_tshift(string index, bool cache = false)
- {
- if (cache) SymbolCache.Set("SPY", Symbols.SPY);
-
- using (Py.GIL())
- {
- dynamic test = PyModule.FromString("testModule",
- $@"
-from datetime import timedelta as d
-def Test(dataFrame, symbol):
- series = dataFrame.lastprice.droplevel(0)
- data = series.tshift(freq=d(1))").GetAttr("Test");
-
- Assert.DoesNotThrow(() => test(GetTestDataFrame(Symbols.SPY), Symbols.SPY));
- }
- }
-
[TestCase("'SPY'", true)]
[TestCase("symbol")]
[TestCase("str(symbol.ID)")]
diff --git a/Tests/Python/PythonPackagesTests.cs b/Tests/Python/PythonPackagesTests.cs
index c035228f54b2..b7e55ec92196 100644
--- a/Tests/Python/PythonPackagesTests.cs
+++ b/Tests/Python/PythonPackagesTests.cs
@@ -23,6 +23,142 @@ namespace QuantConnect.Tests.Python
[TestFixture, Category("TravisExclude")]
public class PythonPackagesTests
{
+ [Test]
+ public void Pgmpy()
+ {
+ AssertCode(@"
+def RunTest():
+ from pgmpy.base import DAG
+ G = DAG()
+ G.add_node(node='a')
+ G.add_nodes_from(nodes=['a', 'b'])");
+ }
+
+ [Test]
+ public void Control()
+ {
+ AssertCode(@"
+def RunTest():
+ import numpy as np
+ import control
+
+ num1 = np.array([2])
+ den1 = np.array([1, 0])
+ num2 = np.array([3])
+ den2 = np.array([4, 1])
+ H1 = control.tf(num1, den1)
+ H2 = control.tf(num2, den2)
+
+ H = control.series(H1, H2)");
+ }
+
+ [Test]
+ public void PyCaret()
+ {
+ AssertCode(@"
+from pycaret.datasets import get_data
+from pycaret.classification import setup
+
+def RunTest():
+ data = get_data('diabetes')
+ s = setup(data, target = 'Class variable', session_id = 123)");
+ }
+
+ [Test]
+ public void NGBoost()
+ {
+ AssertCode(@"
+def RunTest():
+ from ngboost import NGBRegressor
+
+ from sklearn.model_selection import train_test_split
+ from sklearn.metrics import mean_squared_error
+ import pandas as pd
+ import numpy as np
+
+ #Load Boston housing dataset
+ data_url = ""http://lib.stat.cmu.edu/datasets/boston""
+ raw_df = pd.read_csv(data_url, sep=""\s+"", skiprows=22, header=None)
+ X = np.hstack([raw_df.values[::2, :], raw_df.values[1::2, :2]])
+ Y = raw_df.values[1::2, 2]
+
+ X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2)
+
+ ngb = NGBRegressor().fit(X_train, Y_train)
+ Y_preds = ngb.predict(X_test)
+ Y_dists = ngb.pred_dist(X_test)
+
+ # test Mean Squared Error
+ test_MSE = mean_squared_error(Y_preds, Y_test)
+ print('Test MSE', test_MSE)
+
+ # test Negative Log Likelihood
+ test_NLL = -Y_dists.logpdf(Y_test).mean()
+ print('Test NLL', test_NLL)");
+ }
+
+ [Test]
+ public void MLFlow()
+ {
+ AssertCode(@"
+def RunTest():
+ import mlflow
+ from mlflow.models import infer_signature
+
+ import pandas as pd
+ from sklearn import datasets
+ from sklearn.model_selection import train_test_split
+ from sklearn.linear_model import LogisticRegression
+ from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score
+
+
+ # Load the Iris dataset
+ X, y = datasets.load_iris(return_X_y=True)
+
+ # Split the data into training and test sets
+ X_train, X_test, y_train, y_test = train_test_split(
+ X, y, test_size=0.2, random_state=42
+ )
+
+ # Define the model hyperparameters
+ params = {
+ ""solver"": ""lbfgs"",
+ ""max_iter"": 1000,
+ ""multi_class"": ""auto"",
+ ""random_state"": 8888,
+ }
+
+ # Train the model
+ lr = LogisticRegression(**params)
+ lr.fit(X_train, y_train)
+
+ # Predict on the test set
+ y_pred = lr.predict(X_test)
+
+ # Calculate metrics
+ accuracy = accuracy_score(y_test, y_pred)");
+ }
+
+ [Test]
+ public void TPOT()
+ {
+ AssertCode(@"
+def RunTest():
+ from tpot import TPOTClassifier
+ from sklearn.datasets import load_digits
+ from sklearn.model_selection import train_test_split
+
+ digits = load_digits()
+ X_train, X_test, y_train, y_test = train_test_split(digits.data, digits.target,
+ train_size=0.75, test_size=0.25)
+
+ pipeline_optimizer = TPOTClassifier(generations=5, population_size=2, cv=5,
+ random_state=42, verbosity=2)
+ pipeline_optimizer.fit(X_train, y_train)
+ print(pipeline_optimizer.score(X_test, y_test))
+ pipeline_optimizer.export('tpot_exported_pipeline.py')");
+ }
+
[Test, Explicit("Needs to be run by itself to avoid hanging")]
public void XTransformers()
{
@@ -60,14 +196,14 @@ public void Functime()
@"
import polars as pl
from functime.cross_validation import train_test_split
-from functime.feature_extraction import add_fourier_terms
+from functime.seasonality import add_fourier_terms
from functime.forecasting import linear_model
from functime.preprocessing import scale
from functime.metrics import mase
def RunTest():
# Load commodities price data
- y = pl.read_parquet(""https://github.com/descendant-ai/functime/raw/main/data/commodities.parquet"")
+ y = pl.read_parquet(""https://github.com/functime-org/functime/raw/main/data/commodities.parquet"")
entity_col, time_col = y.columns[:2]
# Time series split
@@ -200,31 +336,6 @@ def RunTest():
classifier('We are very happy to introduce pipeline to the transformers repository.')");
}
- [Test]
- public void Tick()
- {
- AssertCode(
- @"
-import numpy as np
-
-from tick.dataset import fetch_hawkes_bund_data
-from tick.hawkes import HawkesConditionalLaw
-from tick.plot import plot_hawkes_kernel_norms
-
-def RunTest():
- timestamps_list = fetch_hawkes_bund_data()
-
- kernel_discretization = np.hstack((0, np.logspace(-5, 0, 50)))
- hawkes_learner = HawkesConditionalLaw(
- claw_method=""log"", delta_lag=0.1, min_lag=5e-4, max_lag=500,
- quad_method=""log"", n_quad=10, min_support=1e-4, max_support=1, n_threads=4)
-
- hawkes_learner.fit(timestamps_list)
-
- plot_hawkes_kernel_norms(hawkes_learner,
- node_names=[""P_u"", ""P_d"", ""T_a"", ""T_b""])");
- }
-
[Test]
public void FixedEffectModel()
{
@@ -275,8 +386,8 @@ def RunTest():
schools_code = """"""
data {
int J; // number of schools
- real y[J]; // estimated treatment effects
- real sigma[J]; // standard error of effect estimates
+ array[J] real y; // estimated treatment effects
+ array[J] real sigma; // standard error of effect estimates
}
parameters {
real mu; // population treatment effect
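The Stan compiler bundled with the newer pystan (3.9.0 in this image) has dropped the old `real y[J];` declarations in favor of the `array[J] real y;` syntax used in the updated eight-schools snippet. A cut-down, illustrative sketch of compiling such a program with the pystan 3 API (the model below is simplified, and the data values are the classic eight-schools numbers):

    import stan  # pystan 3.x

    schools_code = """
    data {
      int J;
      array[J] real y;
      array[J] real sigma;
    }
    parameters {
      real mu;
    }
    model {
      y ~ normal(mu, sigma);
    }
    """
    schools_data = {"J": 8, "y": [28, 8, -3, 7, -1, 1, 18, 12],
                    "sigma": [15, 10, 16, 11, 9, 11, 10, 18]}
    posterior = stan.build(schools_code, data=schools_data)
    fit = posterior.sample(num_chains=4, num_samples=200)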
@@ -580,7 +691,7 @@ def RunTest():
);
}
- [Test]
+ [Test, Explicit("Should be run by itself to avoid matplotlib defaulting to use non existing latex")]
public void ShapTest()
{
AssertCode(
@@ -680,7 +791,7 @@ public void IgniteTest()
import ignite
def RunTest():
- assert(ignite.__version__ == '0.4.12')"
+ assert(ignite.__version__ == '0.4.13')"
);
}
@@ -1294,7 +1405,7 @@ def RunTest():
print('Number of variables =', solver.NumVariables())");
}
- [Test]
+ [Test, Explicit("Requires old version of TF, addons are winding down")]
public void TensorflowAddons()
{
AssertCode(
@@ -1499,7 +1610,7 @@ def RunTest():
print(sorted(Counter(y_resampled).items()))");
}
- [Test, Explicit("Has issues when run along side the other tests")]
+ [Test, Explicit("Requires keras < 3")]
public void ScikerasTest()
{
AssertCode(
@@ -1560,22 +1671,6 @@ def RunTest():
models,predictions = clf.fit(X_train, X_test, y_train, y_test)");
}
- [Test]
- public void Fracdiff()
- {
- AssertCode(
- @"
-import numpy as np
-from fracdiff import fdiff
-
-def RunTest():
- a = np.array([1, 2, 4, 7, 0])
- fdiff(a, 0.5)
- # array([ 1. , 1.5 , 2.875 , 4.6875 , -4.1640625])
- np.array_equal(fdiff(a, n=1), np.diff(a, n=1))
- # True");
- }
-
[Test]
public void Darts()
{
@@ -1769,34 +1864,6 @@ def RunTest():
);
}
- [Test]
- public void ScikitMultiflowTest()
- {
- AssertCode(
- @"
-from skmultiflow.data import WaveformGenerator
-from skmultiflow.trees import HoeffdingTree
-from skmultiflow.evaluation import EvaluatePrequential
-
-def RunTest():
- # 1. Create a stream
- stream = WaveformGenerator()
- stream.prepare_for_use()
-
- # 2. Instantiate the HoeffdingTree classifier
- ht = HoeffdingTree()
-
- # 3. Setup the evaluator
- evaluator = EvaluatePrequential(show_plot=False,
- pretrain_size=200,
- max_samples=20000)
-
- # 4. Run evaluation
- evaluator.evaluate(stream=stream, model=ht)
- return 'Test passed, module exists'"
- );
- }
-
[Test]
public void ScikitOptimizeTest()
{
@@ -2101,39 +2168,6 @@ import clr
);
}
- [Test]
- public void Tigramite()
- {
- AssertCode(@"
-import numpy as np
-from tigramite.pcmci import PCMCI
-from tigramite.independence_tests.parcorr import ParCorr
-import tigramite.data_processing as pp
-from tigramite.toymodels import structural_causal_processes as toys
-
-def RunTest():
- # Example process to play around with
- # Each key refers to a variable and the incoming links are supplied
- # as a list of format [((var, -lag), coeff, function), ...]
- def lin_f(x): return x
- links = {0: [((0, -1), 0.9, lin_f)],
- 1: [((1, -1), 0.8, lin_f), ((0, -1), 0.8, lin_f)],
- 2: [((2, -1), 0.7, lin_f), ((1, 0), 0.6, lin_f)],
- 3: [((3, -1), 0.7, lin_f), ((2, 0), -0.5, lin_f)],
- }
- data, nonstat = toys.structural_causal_process(links,
- T=1000, seed=7)
- # Data must be array of shape (time, variables)
- print (data.shape)
- (1000, 4)
- dataframe = pp.DataFrame(data)
- cond_ind_test = ParCorr()
- pcmci = PCMCI(dataframe=dataframe, cond_ind_test=cond_ind_test)
- results = pcmci.run_pcmciplus(tau_min=0, tau_max=2, pc_alpha=0.01)
- pcmci.print_results(results, alpha_level=0.01)
-");
- }
-
[Test, Explicit("Sometimes hangs when run along side the other tests")]
public void AxPlatformTest()
{
@@ -2202,37 +2236,35 @@ def RunTest():
///
/// The module we are testing
/// The module version
- [TestCase("pulp", "2.7.0", "VERSION")]
- [TestCase("pymc", "5.6.1", "__version__")]
+ [TestCase("pulp", "2.8.0", "VERSION")]
+ [TestCase("pymc", "5.10.4", "__version__")]
[TestCase("pypfopt", "pypfopt", "__name__")]
- [TestCase("wrapt", "1.14.1", "__version__")]
- [TestCase("tslearn", "0.6.2", "__version__")]
+ [TestCase("wrapt", "1.16.0", "__version__")]
+ [TestCase("tslearn", "0.6.3", "__version__")]
[TestCase("tweepy", "4.14.0", "__version__")]
- [TestCase("pywt", "1.4.1", "__version__")]
- [TestCase("umap", "0.5.3", "__version__")]
- [TestCase("dtw", "1.3.0", "__version__")]
+ [TestCase("pywt", "1.5.0", "__version__")]
+ [TestCase("umap", "0.5.5", "__version__")]
+ [TestCase("dtw", "1.3.1", "__version__")]
[TestCase("mplfinance", "0.12.10b0", "__version__")]
[TestCase("cufflinks", "0.17.3", "__version__")]
- [TestCase("ipywidgets", "8.1.1", "__version__")]
- [TestCase("astropy", "5.2.2", "__version__")]
- [TestCase("gluonts", "0.13.7", "__version__")]
+ [TestCase("ipywidgets", "8.1.2", "__version__")]
+ [TestCase("astropy", "6.0.0", "__version__")]
+ [TestCase("gluonts", "0.14.4", "__version__")]
[TestCase("gplearn", "0.4.2", "__version__")]
- [TestCase("featuretools", "1.27.0", "__version__")]
- [TestCase("pennylane", "0.32.0", "version()")]
+ [TestCase("featuretools", "1.30.0", "__version__")]
+ [TestCase("pennylane", "0.35.1", "version()")]
[TestCase("pyfolio", "0.9.5", "__version__")]
- [TestCase("altair", "5.1.2", "__version__")]
- [TestCase("modin", "0.22.3", "__version__")]
- [TestCase("persim", "0.3.1", "__version__")]
- [TestCase("pydmd", "0.4.1.post2308", "__version__")]
+ [TestCase("altair", "5.2.0", "__version__")]
+ [TestCase("modin", "0.26.1", "__version__")]
+ [TestCase("persim", "0.3.5", "__version__")]
+ [TestCase("pydmd", "1.0.0", "__version__")]
[TestCase("pandas_ta", "0.3.14b0", "__version__")]
[TestCase("tensortrade", "1.0.3", "__version__")]
[TestCase("quantstats", "0.0.62", "__version__")]
- [TestCase("autokeras", "1.1.0", "__version__")]
- [TestCase("panel", "1.2.3", "__version__")]
+ [TestCase("panel", "1.3.8", "__version__")]
[TestCase("pyheat", "pyheat", "__name__")]
- [TestCase("tensorflow_decision_forests", "1.5.0", "__version__")]
- [TestCase("tensorflow_ranking", "0.5.3.dev", "__version__")]
- [TestCase("pomegranate", "1.0.3", "__version__")]
+ [TestCase("tensorflow_decision_forests", "1.9.0", "__version__")]
+ [TestCase("pomegranate", "1.0.4", "__version__")]
public void ModuleVersionTest(string module, string value, string attribute)
{
AssertCode(
diff --git a/Tests/QuantConnect.Tests.csproj b/Tests/QuantConnect.Tests.csproj
index 76025ef053f5..be02ff925f57 100644
--- a/Tests/QuantConnect.Tests.csproj
+++ b/Tests/QuantConnect.Tests.csproj
@@ -32,7 +32,7 @@
-
+