Skip to content

Commit

Permalink
fix: modify the archived versions & update code to download only the active versions (
Browse files Browse the repository at this point in the history
  • Loading branch information
yoonhyejin authored Aug 23, 2024
1 parent ef6a410 commit e1a2908
Show file tree
Hide file tree
Showing 2 changed files with 16 additions and 13 deletions.
8 changes: 0 additions & 8 deletions docs-website/docusaurus.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -170,14 +170,6 @@ module.exports = {
value: '<div class="dropdown__link"><b>Archived versions</b></div>',
},
{
value: `
<a class="dropdown__link" href="https://docs-website-qou70o69f-acryldata.vercel.app/docs/features">0.14.0
<svg width="12" height="12" aria-hidden="true" viewBox="0 0 24 24"><path fill="currentColor" d="M21 13v10h-21v-19h12v2h-10v15h17v-8h2zm3-12h-10.988l4.035 4-6.977 7.07 2.828 2.828 6.977-7.07 4.125 4.172v-11z"></path></svg>
</a>
`,
type: "html",
},
{
value: `
<a class="dropdown__link" href="https://docs-website-lzxh86531-acryldata.vercel.app/docs/features">0.13.0
<svg width="12" height="12" aria-hidden="true" viewBox="0 0 24 24"><path fill="currentColor" d="M21 13v10h-21v-19h12v2h-10v15h17v-8h2zm3-12h-10.988l4.035 4-6.977 7.07 2.828 2.828 6.977-7.07 4.125 4.172v-11z"></path></svg>
Expand Down
21 changes: 16 additions & 5 deletions docs-website/download_historical_versions.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import tarfile
import time
import urllib.request
import shutil

repo_url = "https://api.github.com/repos/datahub-project/static-assets"

Expand All @@ -18,7 +19,7 @@ def download_file(url, destination):


def fetch_urls(
repo_url: str, folder_path: str, file_format: str, max_retries=3, retry_delay=5
repo_url: str, folder_path: str, file_format: str, active_versions: list, max_retries=3, retry_delay=5
):
api_url = f"{repo_url}/contents/{folder_path}"
for attempt in range(max_retries + 1):
Expand All @@ -30,7 +31,7 @@ def fetch_urls(
urls = [
file["download_url"]
for file in json.loads(data)
if file["name"].endswith(file_format)
if file["name"].endswith(file_format) and any(version in file["name"] for version in active_versions)
]
print(urls)
return urls
Expand All @@ -48,12 +49,22 @@ def extract_tar_file(destination_path):
tar.extractall()
os.remove(destination_path)

def get_active_versions():
    """Return the list of active documentation versions.

    Reads the Docusaurus ``versions.json`` file from the current working
    directory. Its content is a JSON array of version strings
    (e.g. ``["0.14.0", "0.13.1"]``) — presumably the docs-website build
    runs with ``docs-website/`` as the CWD; verify against the caller.

    Returns:
        list: Version strings parsed from ``versions.json``.

    Raises:
        FileNotFoundError: If ``versions.json`` is not present in the CWD.
        json.JSONDecodeError: If the file is not valid JSON.
    """
    # Explicit encoding makes the read deterministic across platforms
    # (the default text encoding is locale-dependent before PEP 686).
    with open("versions.json", encoding="utf-8") as f:
        return json.load(f)

def clear_directory(directory):
    """Reset *directory* to an empty state.

    Deletes the directory tree if one already exists at that path, then
    recreates it as a fresh, empty directory — so stale files from a
    previous run never linger alongside newly downloaded ones.
    """
    # EAFP: attempt the removal and ignore only the "nothing there" case.
    try:
        shutil.rmtree(directory)
    except FileNotFoundError:
        pass
    os.makedirs(directory)

def download_versioned_docs(folder_path: str, destination_dir: str, file_format: str):
if not os.path.exists(destination_dir):
os.makedirs(destination_dir)
clear_directory(destination_dir) # Clear the directory before downloading

urls = fetch_urls(repo_url, folder_path, file_format)
active_versions = get_active_versions()
urls = fetch_urls(repo_url, folder_path, file_format, active_versions)

for url in urls:
filename = os.path.basename(url)
Expand Down

0 comments on commit e1a2908

Please sign in to comment.