diff --git a/.github/workflows/build_and_deploy.yml b/.github/workflows/build_and_deploy.yml new file mode 100644 index 00000000..8edeeee4 --- /dev/null +++ b/.github/workflows/build_and_deploy.yml @@ -0,0 +1,82 @@ +name: Build and Deploy +run-name: ${{ github.workflow }} - ${{ github.event.head_commit.message }} + +on: + push: + branches: + - main + +concurrency: + group: ${{ github.workflow }} + cancel-in-progress: true + +env: + PYTHON_VERSION: 3.x + +permissions: + contents: write + id-token: write + pages: write + +jobs: + build_and_deploy: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + sparse-checkout: | + docs + + - name: Set up Python runtime + uses: actions/setup-python@v4 + with: + python-version: ${{ env.PYTHON_VERSION }} + cache: pip + cache-dependency-path: | + requirements.txt + + - name: Set up build cache + uses: actions/cache/restore@v3 + with: + key: mkdocs-material-${{ hashFiles('.cache/**') }} + path: .cache + restore-keys: | + mkdocs-material- + + - name: Install dependencies + run: sudo apt-get install -y pngquant + + - name: Install Python dependencies + run: | + pip install -U pip + pip install -r requirements.txt + pip install mkdocs-material[recommended,git,imaging] + + - name: Build documentation + env: + GH_TOKEN: ${{ secrets.GH_TOKEN }} + run: | + mkdocs build --clean + mkdocs --version + + # - name: Adjust permissions + # run: | + # chmod -c -R +rX site/ | while read line; do + # echo "::warning title=Invalid file permissions automatically fixed::$line" + # done + + - name: Upload to GitHub Pages + uses: actions/upload-pages-artifact@v2 + with: + path: site + + - name: Deploy to GitHub Pages + uses: actions/deploy-pages@v2 + + - name: Save build cache + uses: actions/cache/save@v3 + with: + key: mkdocs-material-${{ hashFiles('.cache/**') }} + path: .cache diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 
00000000..214a6617 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,45 @@ +# Copyright (c) 2016-2023 Martin Donath + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+ +version: 2 +updates: + - package-ecosystem: npm + open-pull-requests-limit: 10 + directory: "/" + labels: [] + schedule: + interval: weekly + time: "04:00" + - package-ecosystem: pip + # We only want to bump versions of packages in case of security updates, as + # we want to keep maximum compatibility - see https://t.ly/INSR_ + open-pull-requests-limit: 0 + directory: "/" + labels: [] + schedule: + interval: weekly + time: "04:00" + - package-ecosystem: github-actions + open-pull-requests-limit: 10 + directory: "/" + labels: [] + schedule: + interval: weekly + time: "04:00" diff --git a/.github/workflows/sync-to-gitee.yml b/.github/workflows/sync-to-gitee.yml index 6fd95c05..d80bd9ca 100644 --- a/.github/workflows/sync-to-gitee.yml +++ b/.github/workflows/sync-to-gitee.yml @@ -1,10 +1,15 @@ name: Sync to gitee +run-name: ${{ github.workflow }} - ${{ github.event.head_commit.message }} on: push: branches: - main +concurrency: + group: ${{ github.workflow}} + cancel-in-progress: true + jobs: sync_to_gitee: runs-on: ubuntu-latest diff --git a/.gitignore b/.gitignore index 065aea21..5e4b470c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,32 +1,163 @@ -# Vim -*~ -*.sw[p_] - -# Sublime Text -*.sublime-project -*.sublime-workspace - -# Ruby Gem -*.gem -.bundle -Gemfile.lock -**/vendor/bundle - -# Node.js and NPM -node_modules -npm-debug.log* -package-lock.json -codekit-config.json - -# macOS -.DS_Store - -# Jekyll generated files -.jekyll-cache -.jekyll-metadata -.sass-cache -_asset_bundler_cache -_site - -.venv/ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other 
infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +.vscode/ +site/ diff --git a/.markdownlint.yaml b/.markdownlint.yaml new file mode 100644 index 00000000..f410ccd0 --- /dev/null +++ b/.markdownlint.yaml @@ -0,0 +1,273 @@ +# Example markdownlint configuration with all properties set to their default value + +# Default state for all rules +default: true + +# Path to configuration file to extend +extends: null + +# MD001/heading-increment : Heading levels should only increment by one level at a time : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md001.md +MD001: true + +# MD003/heading-style : Heading style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md003.md +MD003: + # Heading style + style: "consistent" + +# MD004/ul-style : Unordered list style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md004.md +MD004: + # List style + style: "consistent" + +# MD005/list-indent : Inconsistent indentation for list items at the same level : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md005.md +MD005: true + +# MD007/ul-indent : Unordered list indentation : 
https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md007.md +MD007: + # Spaces for indent + indent: 4 + # indent: 2 + # Whether to indent the first level of the list + start_indented: false + # Spaces for first level indent (when start_indented is set) + start_indent: 2 + +# MD009/no-trailing-spaces : Trailing spaces : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md009.md +MD009: + # Spaces for line break + br_spaces: 2 + # Allow spaces for empty lines in list items + list_item_empty_lines: false + # Include unnecessary breaks + strict: false + +# MD010/no-hard-tabs : Hard tabs : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md010.md +MD010: + # Include code blocks + code_blocks: true + # Fenced code languages to ignore + ignore_code_languages: [] + # Number of spaces for each hard tab + spaces_per_tab: 1 + +# MD011/no-reversed-links : Reversed link syntax : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md011.md +MD011: true + +# MD012/no-multiple-blanks : Multiple consecutive blank lines : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md012.md +MD012: + # Consecutive blank lines + maximum: 1 + +# MD013/line-length : Line length : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md013.md +MD013: false +# MD013: +# # Number of characters +# line_length: 80 +# # Number of characters for headings +# heading_line_length: 80 +# # Number of characters for code blocks +# code_block_line_length: 80 +# # Include code blocks +# code_blocks: true +# # Include tables +# tables: true +# # Include headings +# headings: true +# # Strict length checking +# strict: false +# # Stern length checking +# stern: false + +# MD014/commands-show-output : Dollar signs used before commands without showing output : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md014.md +MD014: true + +# MD018/no-missing-space-atx : No space after hash on atx style heading : 
https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md018.md +MD018: true + +# MD019/no-multiple-space-atx : Multiple spaces after hash on atx style heading : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md019.md +MD019: true + +# MD020/no-missing-space-closed-atx : No space inside hashes on closed atx style heading : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md020.md +MD020: true + +# MD021/no-multiple-space-closed-atx : Multiple spaces inside hashes on closed atx style heading : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md021.md +MD021: true + +# MD022/blanks-around-headings : Headings should be surrounded by blank lines : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md022.md +MD022: + # Blank lines above heading + lines_above: 1 + # Blank lines below heading + lines_below: 1 + +# MD023/heading-start-left : Headings must start at the beginning of the line : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md023.md +MD023: true + +# MD024/no-duplicate-heading : Multiple headings with the same content : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md024.md +MD024: + # Only check sibling headings + allow_different_nesting: false + # Only check sibling headings + siblings_only: false + +# MD025/single-title/single-h1 : Multiple top-level headings in the same document : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md025.md +MD025: + # Heading level + level: 1 + # RegExp for matching title in front matter + front_matter_title: "^\\s*title\\s*[:=]" + +# MD026/no-trailing-punctuation : Trailing punctuation in heading : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md026.md +MD026: + # Punctuation characters + punctuation: ".,;:!。,;:!" 
+ +# MD027/no-multiple-space-blockquote : Multiple spaces after blockquote symbol : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md027.md +MD027: true + +# MD028/no-blanks-blockquote : Blank line inside blockquote : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md028.md +MD028: true + +# MD029/ol-prefix : Ordered list item prefix : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md029.md +MD029: + # List style + style: "one_or_ordered" + +# MD030/list-marker-space : Spaces after list markers : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md030.md +MD030: + # Spaces for single-line unordered list items + ul_single: 1 + # Spaces for single-line ordered list items + ol_single: 1 + # Spaces for multi-line unordered list items + ul_multi: 1 + # Spaces for multi-line ordered list items + ol_multi: 1 + +# MD031/blanks-around-fences : Fenced code blocks should be surrounded by blank lines : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md031.md +MD031: + # Include list items + list_items: true + +# MD032/blanks-around-lists : Lists should be surrounded by blank lines : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md032.md +MD032: true + +# MD033/no-inline-html : Inline HTML : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md033.md +MD033: false +# MD033: true + # Allowed elements + # allowed_elements: [] + +# MD034/no-bare-urls : Bare URL used : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md034.md +MD034: false +# MD034: true + +# MD035/hr-style : Horizontal rule style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md035.md +MD035: + # Horizontal rule style + style: "consistent" + +# MD036/no-emphasis-as-heading : Emphasis used instead of a heading : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md036.md +MD036: + # Punctuation characters + punctuation: ".,;:!?。,;:!?" 
+ +# MD037/no-space-in-emphasis : Spaces inside emphasis markers : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md037.md +MD037: true + +# MD038/no-space-in-code : Spaces inside code span elements : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md038.md +MD038: true + +# MD039/no-space-in-links : Spaces inside link text : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md039.md +MD039: true + +# MD040/fenced-code-language : Fenced code blocks should have a language specified : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md040.md +MD040: + # List of languages + allowed_languages: [] + # Require language only + language_only: false + +# MD041/first-line-heading/first-line-h1 : First line in a file should be a top-level heading : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md041.md +MD041: + # Heading level + level: 1 + # RegExp for matching title in front matter + front_matter_title: "^\\s*title\\s*[:=]" + +# MD042/no-empty-links : No empty links : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md042.md +MD042: true + +# MD043/required-headings : Required heading structure : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md043.md +MD043: false +# MD043: +# # List of headings +# headings: [] +# # Match case of headings +# match_case: false + +# MD044/proper-names : Proper names should have the correct capitalization : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md044.md +MD044: + # List of proper names + names: [] + # Include code blocks + code_blocks: true + # Include HTML elements + html_elements: true + +# MD045/no-alt-text : Images should have alternate text (alt text) : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md045.md +MD045: false +# MD045: true + +# MD046/code-block-style : Code block style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md046.md +MD046: false +# MD046: + # Block style + # style: "consistent" + +# 
MD047/single-trailing-newline : Files should end with a single newline character : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md047.md +MD047: true + +# MD048/code-fence-style : Code fence style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md048.md +MD048: + # Code fence style + style: "consistent" + +# MD049/emphasis-style : Emphasis style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md049.md +MD049: + # Emphasis style + style: "consistent" + +# MD050/strong-style : Strong style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md050.md +MD050: + # Strong style + style: "consistent" + +# MD051/link-fragments : Link fragments should be valid : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md051.md +MD051: true + +# MD052/reference-links-images : Reference links and images should use a label that is defined : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md052.md +MD052: + # Include shortcut syntax + shortcut_syntax: false + +# MD053/link-image-reference-definitions : Link and image reference definitions should be needed : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md053.md +MD053: + # Ignored definitions + ignored_definitions: + - "//" + +# MD054/link-image-style : Link and image style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md054.md +MD054: + # Allow autolinks + autolink: true + # Allow inline links and images + inline: true + # Allow full reference links and images + full: true + # Allow collapsed reference links and images + collapsed: true + # Allow shortcut reference links and images + shortcut: true + # Allow URLs as inline links + url_inline: true diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index 985f98cb..00000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "cSpell.language": "en", - "pasteImage.path": "${projectRoot}/_image/blog/${currentFileNameWithoutExt}", - 
"pasteImage.basePath": "${projectRoot}/_image/blog/${currentFileNameWithoutExt}", - "pasteImage.forceUnixStyleSeparator": true, - "pasteImage.prefix": "/_image/blog/", - "workbench.colorCustomizations": { - "activityBar.activeBackground": "#fa1b49", - "activityBar.activeBorder": "#155e02", - "activityBar.background": "#fa1b49", - "activityBar.foreground": "#e7e7e7", - "activityBar.inactiveForeground": "#e7e7e799", - "activityBarBadge.background": "#155e02", - "activityBarBadge.foreground": "#e7e7e7", - "sash.hoverBorder": "#fa1b49", - "statusBar.background": "#dd0531", - "statusBar.foreground": "#e7e7e7", - "statusBarItem.hoverBackground": "#fa1b49", - "statusBarItem.remoteBackground": "#dd0531", - "statusBarItem.remoteForeground": "#e7e7e7", - "titleBar.activeBackground": "#dd0531", - "titleBar.activeForeground": "#e7e7e7", - "titleBar.inactiveBackground": "#dd053199", - "titleBar.inactiveForeground": "#e7e7e799", - "commandCenter.border": "#e7e7e799" - }, - "peacock.remoteColor": "#dd0531" -} diff --git a/README.md b/README.md index cac38172..b7ee4014 100644 --- a/README.md +++ b/README.md @@ -1,274 +1,5 @@ -# [Minimal Mistakes Jekyll theme](https://mmistakes.github.io/minimal-mistakes/) +# My blog -[![LICENSE](https://img.shields.io/badge/license-MIT-lightgrey.svg)](https://raw.githubusercontent.com/mmistakes/minimal-mistakes/master/LICENSE) -[![Jekyll](https://img.shields.io/badge/jekyll-%3E%3D%203.7-blue.svg)](https://jekyllrb.com/) -[![Ruby gem](https://img.shields.io/gem/v/minimal-mistakes-jekyll.svg)](https://rubygems.org/gems/minimal-mistakes-jekyll) -[![Tip Me via PayPal](https://img.shields.io/badge/PayPal-tip%20me-green.svg?logo=paypal)](https://www.paypal.me/mmistakes) -[![Donate to this project using Buy Me A Coffee](https://img.shields.io/badge/buy%20me%20a%20coffee-donate-yellow.svg)](https://www.buymeacoffee.com/mmistakes) +[![Build and Deploy to Github 
Pages](https://github.com/copdips/copdips.github.io/actions/workflows/build_and_deploy.yml/badge.svg)](https://github.com/copdips/copdips.github.io/actions/workflows/build_and_deploy.yml) -Minimal Mistakes is a flexible two-column Jekyll theme, perfect for building personal sites, blogs, and portfolios. As the name implies, styling is purposely minimalistic to be enhanced and customized by you :smile:. - -:sparkles: See what's new in the [CHANGELOG](CHANGELOG.md). - -**If you enjoy this theme, please consider sponsoring:** - -[!["Buy Me A Coffee"](https://user-images.githubusercontent.com/1376749/120938564-50c59780-c6e1-11eb-814f-22a0399623c5.png)](https://www.buymeacoffee.com/mmistakes) - [![Support via PayPal](https://cdn.jsdelivr.net/gh/twolfson/paypal-github-button@1.0.0/dist/button.svg)](https://www.paypal.me/mmistakes) - -**Note:** The theme uses the [jekyll-include-cache](https://github.com/benbalter/jekyll-include-cache) plugin which will need to be installed in your `Gemfile` and must be retained in the `plugins` array of `_config.yml`. Otherwise you'll encounter `Unknown tag 'include_cached'` errors at build. - -[![Minimal Mistakes live preview][2]][1] - -[1]: https://mmistakes.github.io/minimal-mistakes/ -[2]: screenshot.png (live preview) - -![layout examples](screenshot-layouts.png) - -## Notable features - -- Bundled as a "theme gem" for easier installation/upgrading. -- Compatible with GitHub Pages. -- Support for Jekyll's built-in Sass/SCSS preprocessor. -- Nine different skins (color variations). -- Several responsive layout options (single, archive index, search, splash, and paginated home page). -- Optimized for search engines with support for [Twitter Cards](https://dev.twitter.com/cards/overview) and [Open Graph](http://ogp.me/) data. 
-- Optional [header images](https://mmistakes.github.io/minimal-mistakes/docs/layouts/#headers), [custom sidebars](https://mmistakes.github.io/minimal-mistakes/docs/layouts/#sidebars), [table of contents](https://mmistakes.github.io/minimal-mistakes/docs/helpers/#table-of-contents), [galleries](https://mmistakes.github.io/minimal-mistakes/docs/helpers/#gallery), related posts, [breadcrumb links](https://mmistakes.github.io/minimal-mistakes/docs/configuration/#breadcrumb-navigation-beta), [navigation lists](https://mmistakes.github.io/minimal-mistakes/docs/helpers/#navigation-list), and more. -- Commenting support (powered by [Disqus](https://disqus.com/), [Facebook](https://developers.facebook.com/docs/plugins/comments), Google+, [Discourse](https://www.discourse.org/), static-based via [Staticman](https://staticman.net/), [utterances](https://utteranc.es/), and [giscus](https://giscus.app/)). -- [Google Analytics](https://www.google.com/analytics/) support. -- UI localized text in English (default), Arabic (عربي), Brazilian Portuguese (Português brasileiro), Catalan, Chinese, Danish, Dutch, Finnish, French (Français), German (Deutsch), Greek, Hebrew, Hindi (हिंदी), Hungarian, Indonesian, Irish (Gaeilge), Italian (Italiano), Japanese, Kiswahili, Korean, Malayalam, Myanmar (Burmese), Nepali (Nepalese), Norwegian (Norsk), Persian (فارسی), Polish, Punjabi (ਪੰਜਾਬੀ), Romanian, Russian, Slovak, Spanish (Español), Swedish, Thai, Turkish (Türkçe), and Vietnamese. - -## Skins (color variations) - -This theme comes in nine different skins (in addition to the default one). 
- -| `air` | `contrast` | `dark` | -| --- | --- | --- | -| [![air skin](https://mmistakes.github.io/minimal-mistakes/assets/images/air-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/air-skin-archive-large.png) | [![contrast skin](https://mmistakes.github.io/minimal-mistakes/assets/images/contrast-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/contrast-skin-archive-large.png) | [![dark skin](https://mmistakes.github.io/minimal-mistakes/assets/images/dark-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/dark-skin-archive-large.png) | - -| `dirt` | `mint` | `sunrise` | -| --- | --- | --- | -| [![dirt skin](https://mmistakes.github.io/minimal-mistakes/assets/images/dirt-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/dirt-skin-archive-large.png) | [![mint skin](https://mmistakes.github.io/minimal-mistakes/assets/images/mint-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/mint-skin-archive-large.png) | [![sunrise skin](https://mmistakes.github.io/minimal-mistakes/assets/images/sunrise-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/sunrise-skin-archive-large.png) | - -| `aqua` | `neon` | `plum` | -| --- | --- | --- | -| [![aqua skin](https://mmistakes.github.io/minimal-mistakes/assets/images/aqua-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/aqua-skin-archive-large.png) | [![neon skin](https://mmistakes.github.io/minimal-mistakes/assets/images/neon-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/neon-skin-archive-large.png) | [![plum skin](https://mmistakes.github.io/minimal-mistakes/assets/images/plum-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/plum-skin-archive-large.png) | - -## Demo pages - -| Name | Description | -| ------------------------------------------- | 
----------------------------------------------------- | -| [Post with Header Image][header-image-post] | A post with a large header image. | -| [HTML Tags and Formatting Post][html-tags-post] | A variety of common markup showing how the theme styles them. | -| [Syntax Highlighting Post][syntax-post] | Post displaying highlighted code. | -| [Post with a Gallery][gallery-post] | A post showing several images wrapped in `
` elements. | -| [Sample Collection Page][sample-collection] | Single page from a collection. | -| [Categories Archive][categories-archive] | Posts grouped by category. | -| [Tags Archive][tags-archive] | Posts grouped by tag. | - -Additional sample posts are available under [posts archive][year-archive] on the demo site. Source files for these (and the entire demo site) can be found in [`/docs`](docs). - -[header-image-post]: https://mmistakes.github.io/minimal-mistakes/layout-header-image-text-readability/ -[gallery-post]: https://mmistakes.github.io/minimal-mistakes/post%20formats/post-gallery/ -[html-tags-post]: https://mmistakes.github.io/minimal-mistakes/markup/markup-html-tags-and-formatting/ -[syntax-post]: https://mmistakes.github.io/minimal-mistakes/markup-syntax-highlighting/ -[sample-collection]: https://mmistakes.github.io/minimal-mistakes/recipes/chocolate-chip-cookies/ -[categories-archive]: https://mmistakes.github.io/minimal-mistakes/categories/ -[tags-archive]: https://mmistakes.github.io/minimal-mistakes/tags/ -[year-archive]: https://mmistakes.github.io/minimal-mistakes/year-archive/ - -## Installation - -There are three ways to install: as a [gem-based theme](https://jekyllrb.com/docs/themes/#understanding-gem-based-themes), as a [remote theme](https://blog.github.com/2017-11-29-use-any-theme-with-github-pages/) (GitHub Pages compatible), or forking/directly copying all of the theme files into your project. - -### Gem-based method - -With Gem-based themes, directories such as the `assets`, `_layouts`, `_includes`, and `_sass` are stored in the theme’s gem, hidden from your immediate view. Yet all of the necessary directories will be read and processed during Jekyll’s build process. - -This allows for easier installation and updating as you don't have to manage any of the theme files. To install: - -1. Add the following to your `Gemfile`: - - ```ruby - gem "minimal-mistakes-jekyll" - ``` - -2. 
Fetch and update bundled gems by running the following [Bundler](http://bundler.io/) command: - - ```bash - bundle - ``` - -3. Set the `theme` in your project's Jekyll `_config.yml` file: - - ```yaml - theme: minimal-mistakes-jekyll - ``` - -To update the theme run `bundle update`. - -### Remote theme method - -Remote themes are similar to Gem-based themes, but do not require `Gemfile` changes or whitelisting making them ideal for sites hosted with GitHub Pages. - -To install: - -1. Create/replace the contents of your `Gemfile` with the following: - - ```ruby - source "https://rubygems.org" - - gem "github-pages", group: :jekyll_plugins - gem "jekyll-include-cache", group: :jekyll_plugins - ``` - -2. Add `jekyll-include-cache` to the `plugins` array of your `_config.yml`. - -3. Fetch and update bundled gems by running the following [Bundler](http://bundler.io/) command: - - ```bash - bundle - ``` - -4. Add `remote_theme: "mmistakes/minimal-mistakes@4.24.0"` to your `_config.yml` file. Remove any other `theme:` or `remote_theme:` entry. - -**Looking for an example?** Use the [Minimal Mistakes remote theme starter](https://github.com/mmistakes/mm-github-pages-starter/generate) for the quickest method of getting a GitHub Pages hosted site up and running. Generate a new repository from the starter, replace sample content with your own, and configure as needed. - -## Usage - -For detailed instructions on how to configure, customize, add/migrate content, and more read the [theme's documentation](https://mmistakes.github.io/minimal-mistakes/docs/quick-start-guide/). - ---- - -## Contributing - -Found a typo in the documentation or interested in [fixing a bug](https://github.com/mmistakes/minimal-mistakes/issues)? Then by all means [submit an issue](https://github.com/mmistakes/minimal-mistakes/issues/new) or [pull request](https://help.github.com/articles/using-pull-requests/). 
If this is your first pull request, it may be helpful to read up on the [GitHub Flow](https://guides.github.com/introduction/flow/) first. - -For help with using the theme or general Jekyll support questions, please use the [Jekyll Talk forums](https://talk.jekyllrb.com/). - -### Pull Requests - -When submitting a pull request: - -1. Clone the repo. -2. Create a branch off of `master` and give it a meaningful name (e.g. `my-awesome-new-feature`). -3. Open a pull request on GitHub and describe the feature or fix. - -Theme documentation and demo pages can be found in the [`/docs`](docs) if submitting improvements, typo corrections, etc. - -## Development - -To set up your environment to develop this theme, run `bundle install`. - -To test the theme, run `bundle exec rake preview` and open your browser at `http://localhost:4000/test/`. This starts a Jekyll server using content in the `test/` directory. As modifications are made to the theme and test site, it will regenerate and you should see the changes in the browser after a refresh. 
- ---- - -## Credits - -### Creator - -**Michael Rose** - -- -- -- - -### Icons + Demo Images: - -- [The Noun Project](https://thenounproject.com) -- Garrett Knoll, Arthur Shlain, and [tracy tam](https://thenounproject.com/tracytam) -- [Font Awesome](http://fontawesome.io/) -- [Unsplash](https://unsplash.com/) - -### Other: - -- [Jekyll](http://jekyllrb.com/) -- [jQuery](http://jquery.com/) -- [Susy](http://susy.oddbird.net/) -- [Breakpoint](http://breakpoint-sass.com/) -- [Magnific Popup](http://dimsemenov.com/plugins/magnific-popup/) -- [FitVids.JS](http://fitvidsjs.com/) -- [GreedyNav.js](https://github.com/lukejacksonn/GreedyNav) -- [Smooth Scroll](https://github.com/cferdinandi/smooth-scroll) -- [Gumshoe](https://github.com/cferdinandi/gumshoe) -- [jQuery throttle / debounce](http://benalman.com/projects/jquery-throttle-debounce-plugin/) -- [Lunr](http://lunrjs.com) - ---- - -## License - -The MIT License (MIT) - -Copyright (c) 2013-2020 Michael Rose and contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - -Minimal Mistakes incorporates icons from [The Noun Project](https://thenounproject.com/) -creators Garrett Knoll, Arthur Shlain, and tracy tam. -Icons are distributed under Creative Commons Attribution 3.0 United States (CC BY 3.0 US). - -Minimal Mistakes incorporates [Font Awesome](http://fontawesome.io/), -Copyright (c) 2017 Dave Gandy. -Font Awesome is distributed under the terms of the [SIL OFL 1.1](http://scripts.sil.org/OFL) -and [MIT License](http://opensource.org/licenses/MIT). - -Minimal Mistakes incorporates photographs from [Unsplash](https://unsplash.com). - -Minimal Mistakes incorporates [Susy](http://susy.oddbird.net/), -Copyright (c) 2017, Miriam Eric Suzanne. -Susy is distributed under the terms of the [BSD 3-clause "New" or "Revised" License](https://opensource.org/licenses/BSD-3-Clause). - -Minimal Mistakes incorporates [Breakpoint](http://breakpoint-sass.com/). -Breakpoint is distributed under the terms of the [MIT/GPL Licenses](http://opensource.org/licenses/MIT). - -Minimal Mistakes incorporates [FitVids.js](https://github.com/davatron5000/FitVids.js/), -Copyright (c) 2013 Dave Rubert and Chris Coyier. -FitVids is distributed under the terms of the [WTFPL License](http://www.wtfpl.net/). - -Minimal Mistakes incorporates [Magnific Popup](http://dimsemenov.com/plugins/magnific-popup/), -Copyright (c) 2014-2016 Dmitry Semenov, http://dimsemenov.com. -Magnific Popup is distributed under the terms of the MIT License. - -Minimal Mistakes incorporates [Smooth Scroll](http://github.com/cferdinandi/smooth-scroll), -Copyright (c) 2019 Chris Ferdinandi. -Smooth Scroll is distributed under the terms of the [MIT License](http://opensource.org/licenses/MIT). 
- -Minimal Mistakes incorporates [Gumshoejs](http://github.com/cferdinandi/gumshoe), -Copyright (c) 2019 Chris Ferdinandi. -Gumshoejs is distributed under the terms of the [MIT License](http://opensource.org/licenses/MIT). - -Minimal Mistakes incorporates [jQuery throttle / debounce](http://benalman.com/projects/jquery-throttle-debounce-plugin/), -Copyright (c) 2010 "Cowboy" Ben Alman. -jQuery throttle / debounce is distributed under the terms of the [MIT License](http://opensource.org/licenses/MIT). - -Minimal Mistakes incorporates [GreedyNav.js](https://github.com/lukejacksonn/GreedyNav), -Copyright (c) 2015 Luke Jackson. -GreedyNav.js is distributed under the terms of the [MIT License](http://opensource.org/licenses/MIT). - -Minimal Mistakes incorporates [Jekyll Group-By-Array](https://github.com/mushishi78/jekyll-group-by-array), -Copyright (c) 2015 Max White . -Jekyll Group-By-Array is distributed under the terms of the [MIT License](http://opensource.org/licenses/MIT). - -Minimal Mistakes incorporates [@allejo's Pure Liquid Jekyll Table of Contents](https://allejo.io/blog/a-jekyll-toc-in-liquid-only/), -Copyright (c) 2017 Vladimir Jimenez. -Pure Liquid Jekyll Table of Contents is distributed under the terms of the [MIT License](http://opensource.org/licenses/MIT). - -Minimal Mistakes incorporates [Lunr](http://lunrjs.com), -Copyright (c) 2018 Oliver Nightingale. -Lunr is distributed under the terms of the [MIT License](http://opensource.org/licenses/MIT). 
+https://copdips.com diff --git a/_archive_jekyll/.editorconfig b/_archive_jekyll/.editorconfig new file mode 100644 index 00000000..3a287c45 --- /dev/null +++ b/_archive_jekyll/.editorconfig @@ -0,0 +1,13 @@ +# editorconfig.org +root = true + +[*] +indent_style = space +indent_size = 2 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = false + +[*.md] +trim_trailing_whitespace = false diff --git a/.gitattributes b/_archive_jekyll/.gitattributes similarity index 100% rename from .gitattributes rename to _archive_jekyll/.gitattributes diff --git a/.github/CONTRIBUTING.md b/_archive_jekyll/.github/CONTRIBUTING.md similarity index 100% rename from .github/CONTRIBUTING.md rename to _archive_jekyll/.github/CONTRIBUTING.md diff --git a/.github/FUNDING.yml b/_archive_jekyll/.github/FUNDING.yml similarity index 100% rename from .github/FUNDING.yml rename to _archive_jekyll/.github/FUNDING.yml diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/_archive_jekyll/.github/ISSUE_TEMPLATE/bug_report.md similarity index 100% rename from .github/ISSUE_TEMPLATE/bug_report.md rename to _archive_jekyll/.github/ISSUE_TEMPLATE/bug_report.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/_archive_jekyll/.github/ISSUE_TEMPLATE/bug_report.yml similarity index 100% rename from .github/ISSUE_TEMPLATE/bug_report.yml rename to _archive_jekyll/.github/ISSUE_TEMPLATE/bug_report.yml diff --git a/.github/ISSUE_TEMPLATE/config.yml b/_archive_jekyll/.github/ISSUE_TEMPLATE/config.yml similarity index 100% rename from .github/ISSUE_TEMPLATE/config.yml rename to _archive_jekyll/.github/ISSUE_TEMPLATE/config.yml diff --git a/.github/ISSUE_TEMPLATE/documentation.md b/_archive_jekyll/.github/ISSUE_TEMPLATE/documentation.md similarity index 100% rename from .github/ISSUE_TEMPLATE/documentation.md rename to _archive_jekyll/.github/ISSUE_TEMPLATE/documentation.md diff --git a/.github/ISSUE_TEMPLATE/documentation.yml 
b/_archive_jekyll/.github/ISSUE_TEMPLATE/documentation.yml similarity index 100% rename from .github/ISSUE_TEMPLATE/documentation.yml rename to _archive_jekyll/.github/ISSUE_TEMPLATE/documentation.yml diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/_archive_jekyll/.github/PULL_REQUEST_TEMPLATE.md similarity index 100% rename from .github/PULL_REQUEST_TEMPLATE.md rename to _archive_jekyll/.github/PULL_REQUEST_TEMPLATE.md diff --git a/.github/workflows/bad-pr.yml b/_archive_jekyll/.github/workflows/bad-pr.yml similarity index 100% rename from .github/workflows/bad-pr.yml rename to _archive_jekyll/.github/workflows/bad-pr.yml diff --git a/.github/workflows/build-jekyll.yml b/_archive_jekyll/.github/workflows/build-jekyll.yml similarity index 100% rename from .github/workflows/build-jekyll.yml rename to _archive_jekyll/.github/workflows/build-jekyll.yml diff --git a/_archive_jekyll/.github/workflows/sync-to-gitee.yml b/_archive_jekyll/.github/workflows/sync-to-gitee.yml new file mode 100644 index 00000000..6fd95c05 --- /dev/null +++ b/_archive_jekyll/.github/workflows/sync-to-gitee.yml @@ -0,0 +1,19 @@ +name: Sync to gitee + +on: + push: + branches: + - main + +jobs: + sync_to_gitee: + runs-on: ubuntu-latest + steps: + # - uses: actions/checkout@v2 + + - name: sync-gitee-mirror + uses: abersheeran/sync-gitee-mirror@v1-beta + with: + repository: copdips/copdips.github.io + username: ${{ secrets.GITEE_USERNAME }} + password: ${{ secrets.GITEE_PAT }} diff --git a/_archive_jekyll/.gitignore b/_archive_jekyll/.gitignore new file mode 100644 index 00000000..065aea21 --- /dev/null +++ b/_archive_jekyll/.gitignore @@ -0,0 +1,32 @@ +# Vim +*~ +*.sw[p_] + +# Sublime Text +*.sublime-project +*.sublime-workspace + +# Ruby Gem +*.gem +.bundle +Gemfile.lock +**/vendor/bundle + +# Node.js and NPM +node_modules +npm-debug.log* +package-lock.json +codekit-config.json + +# macOS +.DS_Store + +# Jekyll generated files +.jekyll-cache +.jekyll-metadata +.sass-cache +_asset_bundler_cache 
+_site + +.venv/ +venv/ diff --git a/.travis.yml b/_archive_jekyll/.travis.yml similarity index 100% rename from .travis.yml rename to _archive_jekyll/.travis.yml diff --git a/CHANGELOG.md b/_archive_jekyll/CHANGELOG.md similarity index 100% rename from CHANGELOG.md rename to _archive_jekyll/CHANGELOG.md diff --git a/_archive_jekyll/CNAME b/_archive_jekyll/CNAME new file mode 100644 index 00000000..0cae7cad --- /dev/null +++ b/_archive_jekyll/CNAME @@ -0,0 +1 @@ +copdips.com \ No newline at end of file diff --git a/Gemfile b/_archive_jekyll/Gemfile similarity index 100% rename from Gemfile rename to _archive_jekyll/Gemfile diff --git a/_archive_jekyll/LICENSE b/_archive_jekyll/LICENSE new file mode 100644 index 00000000..3e733ff3 --- /dev/null +++ b/_archive_jekyll/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013-2020 Michael Rose and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/_archive_jekyll/README.md b/_archive_jekyll/README.md new file mode 100644 index 00000000..f9a2c40a --- /dev/null +++ b/_archive_jekyll/README.md @@ -0,0 +1,274 @@ +# [Minimal Mistakes Jekyll theme](https://mmistakes.github.io/minimal-mistakes/) + +[![LICENSE](https://img.shields.io/badge/license-MIT-lightgrey.svg)](https://raw.githubusercontent.com/mmistakes/minimal-mistakes/master/LICENSE) +[![Jekyll](https://img.shields.io/badge/jekyll-%3E%3D%203.7-blue.svg)](https://jekyllrb.com/) +[![Ruby gem](https://img.shields.io/gem/v/minimal-mistakes-jekyll.svg)](https://rubygems.org/gems/minimal-mistakes-jekyll) +[![Tip Me via PayPal](https://img.shields.io/badge/PayPal-tip%20me-green.svg?logo=paypal)](https://www.paypal.me/mmistakes) +[![Donate to this project using Buy Me A Coffee](https://img.shields.io/badge/buy%20me%20a%20coffee-donate-yellow.svg)](https://www.buymeacoffee.com/mmistakes) + +Minimal Mistakes is a flexible two-column Jekyll theme, perfect for building personal sites, blogs, and portfolios. As the name implies, styling is purposely minimalistic to be enhanced and customized by you :smile:. + +:sparkles: See what's new in the [CHANGELOG](CHANGELOG.md). + +**If you enjoy this theme, please consider sponsoring:** + +[!["Buy Me A Coffee"](https://user-images.githubusercontent.com/1376749/120938564-50c59780-c6e1-11eb-814f-22a0399623c5.png)](https://www.buymeacoffee.com/mmistakes) + [![Support via PayPal](https://cdn.jsdelivr.net/gh/twolfson/paypal-github-button@1.0.0/dist/button.svg)](https://www.paypal.me/mmistakes) + +**Note:** The theme uses the [jekyll-include-cache](https://github.com/benbalter/jekyll-include-cache) plugin which will need to be installed in your `Gemfile` and must be retained in the `plugins` array of `_config.yml`. Otherwise you'll encounter `Unknown tag 'include_cached'` errors at build. 
+ +[![Minimal Mistakes live preview][2]][1] + +[1]: https://mmistakes.github.io/minimal-mistakes/ +[2]: screenshot.png (live preview) + +![layout examples](screenshot-layouts.png) + +## Notable features + +- Bundled as a "theme gem" for easier installation/upgrading. +- Compatible with GitHub Pages. +- Support for Jekyll's built-in Sass/SCSS preprocessor. +- Nine different skins (color variations). +- Several responsive layout options (single, archive index, search, splash, and paginated home page). +- Optimized for search engines with support for [Twitter Cards](https://dev.twitter.com/cards/overview) and [Open Graph](http://ogp.me/) data. +- Optional [header images](https://mmistakes.github.io/minimal-mistakes/docs/layouts/#headers), [custom sidebars](https://mmistakes.github.io/minimal-mistakes/docs/layouts/#sidebars), [table of contents](https://mmistakes.github.io/minimal-mistakes/docs/helpers/#table-of-contents), [galleries](https://mmistakes.github.io/minimal-mistakes/docs/helpers/#gallery), related posts, [breadcrumb links](https://mmistakes.github.io/minimal-mistakes/docs/configuration/#breadcrumb-navigation-beta), [navigation lists](https://mmistakes.github.io/minimal-mistakes/docs/helpers/#navigation-list), and more. +- Commenting support (powered by [Disqus](https://disqus.com/), [Facebook](https://developers.facebook.com/docs/plugins/comments), Google+, [Discourse](https://www.discourse.org/), static-based via [Staticman](https://staticman.net/), [utterances](https://utteranc.es/), and [giscus](https://giscus.app/)). +- [Google Analytics](https://www.google.com/analytics/) support. 
+- UI localized text in English (default), Arabic (عربي), Brazilian Portuguese (Português brasileiro), Catalan, Chinese, Danish, Dutch, Finnish, French (Français), German (Deutsch), Greek, Hebrew, Hindi (हिंदी), Hungarian, Indonesian, Irish (Gaeilge), Italian (Italiano), Japanese, Kiswahili, Korean, Malayalam, Myanmar (Burmese), Nepali (Nepalese), Norwegian (Norsk), Persian (فارسی), Polish, Punjabi (ਪੰਜਾਬੀ), Romanian, Russian, Slovak, Spanish (Español), Swedish, Thai, Turkish (Türkçe), and Vietnamese. + +## Skins (color variations) + +This theme comes in nine different skins (in addition to the default one). + +| `air` | `contrast` | `dark` | +| --- | --- | --- | +| [![air skin](https://mmistakes.github.io/minimal-mistakes/assets/images/air-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/air-skin-archive-large.png) | [![contrast skin](https://mmistakes.github.io/minimal-mistakes/assets/images/contrast-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/contrast-skin-archive-large.png) | [![dark skin](https://mmistakes.github.io/minimal-mistakes/assets/images/dark-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/dark-skin-archive-large.png) | + +| `dirt` | `mint` | `sunrise` | +| --- | --- | --- | +| [![dirt skin](https://mmistakes.github.io/minimal-mistakes/assets/images/dirt-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/dirt-skin-archive-large.png) | [![mint skin](https://mmistakes.github.io/minimal-mistakes/assets/images/mint-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/mint-skin-archive-large.png) | [![sunrise skin](https://mmistakes.github.io/minimal-mistakes/assets/images/sunrise-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/sunrise-skin-archive-large.png) | + +| `aqua` | `neon` | `plum` | +| --- | --- | --- | +| [![aqua 
skin](https://mmistakes.github.io/minimal-mistakes/assets/images/aqua-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/aqua-skin-archive-large.png) | [![neon skin](https://mmistakes.github.io/minimal-mistakes/assets/images/neon-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/neon-skin-archive-large.png) | [![plum skin](https://mmistakes.github.io/minimal-mistakes/assets/images/plum-skin-archive.png)](https://mmistakes.github.io/minimal-mistakes/assets/images/plum-skin-archive-large.png) | + +## Demo pages + +| Name | Description | +| ------------------------------------------- | ----------------------------------------------------- | +| [Post with Header Image][header-image-post] | A post with a large header image. | +| [HTML Tags and Formatting Post][html-tags-post] | A variety of common markup showing how the theme styles them. | +| [Syntax Highlighting Post][syntax-post] | Post displaying highlighted code. | +| [Post with a Gallery][gallery-post] | A post showing several images wrapped in `
` elements. | +| [Sample Collection Page][sample-collection] | Single page from a collection. | +| [Categories Archive][categories-archive] | Posts grouped by category. | +| [Tags Archive][tags-archive] | Posts grouped by tag. | + +Additional sample posts are available under [posts archive][year-archive] on the demo site. Source files for these (and the entire demo site) can be found in [`/docs`](../docs). + +[header-image-post]: https://mmistakes.github.io/minimal-mistakes/layout-header-image-text-readability/ +[gallery-post]: https://mmistakes.github.io/minimal-mistakes/post%20formats/post-gallery/ +[html-tags-post]: https://mmistakes.github.io/minimal-mistakes/markup/markup-html-tags-and-formatting/ +[syntax-post]: https://mmistakes.github.io/minimal-mistakes/markup-syntax-highlighting/ +[sample-collection]: https://mmistakes.github.io/minimal-mistakes/recipes/chocolate-chip-cookies/ +[categories-archive]: https://mmistakes.github.io/minimal-mistakes/categories/ +[tags-archive]: https://mmistakes.github.io/minimal-mistakes/tags/ +[year-archive]: https://mmistakes.github.io/minimal-mistakes/year-archive/ + +## Installation + +There are three ways to install: as a [gem-based theme](https://jekyllrb.com/docs/themes/#understanding-gem-based-themes), as a [remote theme](https://blog.github.com/2017-11-29-use-any-theme-with-github-pages/) (GitHub Pages compatible), or forking/directly copying all of the theme files into your project. + +### Gem-based method + +With Gem-based themes, directories such as the `assets`, `_layouts`, `_includes`, and `_sass` are stored in the theme’s gem, hidden from your immediate view. Yet all of the necessary directories will be read and processed during Jekyll’s build process. + +This allows for easier installation and updating as you don't have to manage any of the theme files. To install: + +1. Add the following to your `Gemfile`: + + ```ruby + gem "minimal-mistakes-jekyll" + ``` + +2. 
Fetch and update bundled gems by running the following [Bundler](http://bundler.io/) command: + + ```bash + bundle + ``` + +3. Set the `theme` in your project's Jekyll `_config.yml` file: + + ```yaml + theme: minimal-mistakes-jekyll + ``` + +To update the theme run `bundle update`. + +### Remote theme method + +Remote themes are similar to Gem-based themes, but do not require `Gemfile` changes or whitelisting making them ideal for sites hosted with GitHub Pages. + +To install: + +1. Create/replace the contents of your `Gemfile` with the following: + + ```ruby + source "https://rubygems.org" + + gem "github-pages", group: :jekyll_plugins + gem "jekyll-include-cache", group: :jekyll_plugins + ``` + +2. Add `jekyll-include-cache` to the `plugins` array of your `_config.yml`. + +3. Fetch and update bundled gems by running the following [Bundler](http://bundler.io/) command: + + ```bash + bundle + ``` + +4. Add `remote_theme: "mmistakes/minimal-mistakes@4.24.0"` to your `_config.yml` file. Remove any other `theme:` or `remote_theme:` entry. + +**Looking for an example?** Use the [Minimal Mistakes remote theme starter](https://github.com/mmistakes/mm-github-pages-starter/generate) for the quickest method of getting a GitHub Pages hosted site up and running. Generate a new repository from the starter, replace sample content with your own, and configure as needed. + +## Usage + +For detailed instructions on how to configure, customize, add/migrate content, and more read the [theme's documentation](https://mmistakes.github.io/minimal-mistakes/docs/quick-start-guide/). + +--- + +## Contributing + +Found a typo in the documentation or interested in [fixing a bug](https://github.com/mmistakes/minimal-mistakes/issues)? Then by all means [submit an issue](https://github.com/mmistakes/minimal-mistakes/issues/new) or [pull request](https://help.github.com/articles/using-pull-requests/). 
If this is your first pull request, it may be helpful to read up on the [GitHub Flow](https://guides.github.com/introduction/flow/) first. + +For help with using the theme or general Jekyll support questions, please use the [Jekyll Talk forums](https://talk.jekyllrb.com/). + +### Pull Requests + +When submitting a pull request: + +1. Clone the repo. +2. Create a branch off of `master` and give it a meaningful name (e.g. `my-awesome-new-feature`). +3. Open a pull request on GitHub and describe the feature or fix. + +Theme documentation and demo pages can be found in the [`/docs`](../docs) if submitting improvements, typo corrections, etc. + +## Development + +To set up your environment to develop this theme, run `bundle install`. + +To test the theme, run `bundle exec rake preview` and open your browser at `http://localhost:4000/test/`. This starts a Jekyll server using content in the `test/` directory. As modifications are made to the theme and test site, it will regenerate and you should see the changes in the browser after a refresh. 
+ +--- + +## Credits + +### Creator + +**Michael Rose** + +- +- +- + +### Icons + Demo Images: + +- [The Noun Project](https://thenounproject.com) -- Garrett Knoll, Arthur Shlain, and [tracy tam](https://thenounproject.com/tracytam) +- [Font Awesome](http://fontawesome.io/) +- [Unsplash](https://unsplash.com/) + +### Other: + +- [Jekyll](http://jekyllrb.com/) +- [jQuery](http://jquery.com/) +- [Susy](http://susy.oddbird.net/) +- [Breakpoint](http://breakpoint-sass.com/) +- [Magnific Popup](http://dimsemenov.com/plugins/magnific-popup/) +- [FitVids.JS](http://fitvidsjs.com/) +- [GreedyNav.js](https://github.com/lukejacksonn/GreedyNav) +- [Smooth Scroll](https://github.com/cferdinandi/smooth-scroll) +- [Gumshoe](https://github.com/cferdinandi/gumshoe) +- [jQuery throttle / debounce](http://benalman.com/projects/jquery-throttle-debounce-plugin/) +- [Lunr](http://lunrjs.com) + +--- + +## License + +The MIT License (MIT) + +Copyright (c) 2013-2020 Michael Rose and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +Minimal Mistakes incorporates icons from [The Noun Project](https://thenounproject.com/) +creators Garrett Knoll, Arthur Shlain, and tracy tam. +Icons are distributed under Creative Commons Attribution 3.0 United States (CC BY 3.0 US). + +Minimal Mistakes incorporates [Font Awesome](http://fontawesome.io/), +Copyright (c) 2017 Dave Gandy. +Font Awesome is distributed under the terms of the [SIL OFL 1.1](http://scripts.sil.org/OFL) +and [MIT License](http://opensource.org/licenses/MIT). + +Minimal Mistakes incorporates photographs from [Unsplash](https://unsplash.com). + +Minimal Mistakes incorporates [Susy](http://susy.oddbird.net/), +Copyright (c) 2017, Miriam Eric Suzanne. +Susy is distributed under the terms of the [BSD 3-clause "New" or "Revised" License](https://opensource.org/licenses/BSD-3-Clause). + +Minimal Mistakes incorporates [Breakpoint](http://breakpoint-sass.com/). +Breakpoint is distributed under the terms of the [MIT/GPL Licenses](http://opensource.org/licenses/MIT). + +Minimal Mistakes incorporates [FitVids.js](https://github.com/davatron5000/FitVids.js/), +Copyright (c) 2013 Dave Rupert and Chris Coyier. +FitVids is distributed under the terms of the [WTFPL License](http://www.wtfpl.net/). + +Minimal Mistakes incorporates [Magnific Popup](http://dimsemenov.com/plugins/magnific-popup/), +Copyright (c) 2014-2016 Dmitry Semenov, http://dimsemenov.com. +Magnific Popup is distributed under the terms of the MIT License. + +Minimal Mistakes incorporates [Smooth Scroll](http://github.com/cferdinandi/smooth-scroll), +Copyright (c) 2019 Chris Ferdinandi. +Smooth Scroll is distributed under the terms of the [MIT License](http://opensource.org/licenses/MIT). 
+ +Minimal Mistakes incorporates [Gumshoejs](http://github.com/cferdinandi/gumshoe), +Copyright (c) 2019 Chris Ferdinandi. +Gumshoejs is distributed under the terms of the [MIT License](http://opensource.org/licenses/MIT). + +Minimal Mistakes incorporates [jQuery throttle / debounce](http://benalman.com/projects/jquery-throttle-debounce-plugin/), +Copyright (c) 2010 "Cowboy" Ben Alman. +jQuery throttle / debounce is distributed under the terms of the [MIT License](http://opensource.org/licenses/MIT). + +Minimal Mistakes incorporates [GreedyNav.js](https://github.com/lukejacksonn/GreedyNav), +Copyright (c) 2015 Luke Jackson. +GreedyNav.js is distributed under the terms of the [MIT License](http://opensource.org/licenses/MIT). + +Minimal Mistakes incorporates [Jekyll Group-By-Array](https://github.com/mushishi78/jekyll-group-by-array), +Copyright (c) 2015 Max White . +Jekyll Group-By-Array is distributed under the terms of the [MIT License](http://opensource.org/licenses/MIT). + +Minimal Mistakes incorporates [@allejo's Pure Liquid Jekyll Table of Contents](https://allejo.io/blog/a-jekyll-toc-in-liquid-only/), +Copyright (c) 2017 Vladimir Jimenez. +Pure Liquid Jekyll Table of Contents is distributed under the terms of the [MIT License](http://opensource.org/licenses/MIT). + +Minimal Mistakes incorporates [Lunr](http://lunrjs.com), +Copyright (c) 2018 Oliver Nightingale. +Lunr is distributed under the terms of the [MIT License](http://opensource.org/licenses/MIT). 
diff --git a/Rakefile b/_archive_jekyll/Rakefile similarity index 100% rename from Rakefile rename to _archive_jekyll/Rakefile diff --git a/_config.yml b/_archive_jekyll/_config.yml similarity index 100% rename from _config.yml rename to _archive_jekyll/_config.yml diff --git a/_data/navigation.yml b/_archive_jekyll/_data/navigation.yml similarity index 100% rename from _data/navigation.yml rename to _archive_jekyll/_data/navigation.yml diff --git a/_data/ui-text.yml b/_archive_jekyll/_data/ui-text.yml similarity index 100% rename from _data/ui-text.yml rename to _archive_jekyll/_data/ui-text.yml diff --git a/_image/blog/2018-05-26-grep-like-powershell-colorful-select-string/Select-ColorString-demo.gif b/_archive_jekyll/_image/blog/2018-05-26-grep-like-powershell-colorful-select-string/Select-ColorString-demo.gif similarity index 100% rename from _image/blog/2018-05-26-grep-like-powershell-colorful-select-string/Select-ColorString-demo.gif rename to _archive_jekyll/_image/blog/2018-05-26-grep-like-powershell-colorful-select-string/Select-ColorString-demo.gif diff --git a/_image/blog/2018-05-26-grep-like-powershell-colorful-select-string/new-switch-MultiColorsForSimplePattern.PNG b/_archive_jekyll/_image/blog/2018-05-26-grep-like-powershell-colorful-select-string/new-switch-MultiColorsForSimplePattern.PNG similarity index 100% rename from _image/blog/2018-05-26-grep-like-powershell-colorful-select-string/new-switch-MultiColorsForSimplePattern.PNG rename to _archive_jekyll/_image/blog/2018-05-26-grep-like-powershell-colorful-select-string/new-switch-MultiColorsForSimplePattern.PNG diff --git a/_image/blog/2018-05-26-grep-like-powershell-colorful-select-string/powershell7-default-highlighting.png b/_archive_jekyll/_image/blog/2018-05-26-grep-like-powershell-colorful-select-string/powershell7-default-highlighting.png similarity index 100% rename from _image/blog/2018-05-26-grep-like-powershell-colorful-select-string/powershell7-default-highlighting.png rename to 
_archive_jekyll/_image/blog/2018-05-26-grep-like-powershell-colorful-select-string/powershell7-default-highlighting.png diff --git a/_image/blog/2018-05-26-grep-like-powershell-colorful-select-string/trace-word-screenshot.png b/_archive_jekyll/_image/blog/2018-05-26-grep-like-powershell-colorful-select-string/trace-word-screenshot.png similarity index 100% rename from _image/blog/2018-05-26-grep-like-powershell-colorful-select-string/trace-word-screenshot.png rename to _archive_jekyll/_image/blog/2018-05-26-grep-like-powershell-colorful-select-string/trace-word-screenshot.png diff --git a/_image/blog/2018-09-20-install-gitlab-runner-on-windows-by-powershell-psremoting/gitlab-runner-settings-from-web.PNG b/_archive_jekyll/_image/blog/2018-09-20-install-gitlab-runner-on-windows-by-powershell-psremoting/gitlab-runner-settings-from-web.PNG similarity index 100% rename from _image/blog/2018-09-20-install-gitlab-runner-on-windows-by-powershell-psremoting/gitlab-runner-settings-from-web.PNG rename to _archive_jekyll/_image/blog/2018-09-20-install-gitlab-runner-on-windows-by-powershell-psremoting/gitlab-runner-settings-from-web.PNG diff --git a/_image/blog/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_job_view.PNG b/_archive_jekyll/_image/blog/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_job_view.PNG similarity index 100% rename from _image/blog/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_job_view.PNG rename to _archive_jekyll/_image/blog/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_job_view.PNG diff --git a/_image/blog/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_merge_request_view.PNG 
b/_archive_jekyll/_image/blog/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_merge_request_view.PNG similarity index 100% rename from _image/blog/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_merge_request_view.PNG rename to _archive_jekyll/_image/blog/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_merge_request_view.PNG diff --git a/_image/blog/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_pipeline_view.PNG b/_archive_jekyll/_image/blog/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_pipeline_view.PNG similarity index 100% rename from _image/blog/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_pipeline_view.PNG rename to _archive_jekyll/_image/blog/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_pipeline_view.PNG diff --git a/_image/blog/2019-06-19-git-cheat-sheet/git-gui.PNG b/_archive_jekyll/_image/blog/2019-06-19-git-cheat-sheet/git-gui.PNG similarity index 100% rename from _image/blog/2019-06-19-git-cheat-sheet/git-gui.PNG rename to _archive_jekyll/_image/blog/2019-06-19-git-cheat-sheet/git-gui.PNG diff --git a/_image/blog/2022-03-27-manage-azure-databricks-service-principal/azure-databricks-multiple-workspaces.png b/_archive_jekyll/_image/blog/2022-03-27-manage-azure-databricks-service-principal/azure-databricks-multiple-workspaces.png similarity index 100% rename from _image/blog/2022-03-27-manage-azure-databricks-service-principal/azure-databricks-multiple-workspaces.png rename to _archive_jekyll/_image/blog/2022-03-27-manage-azure-databricks-service-principal/azure-databricks-multiple-workspaces.png diff --git a/_includes/analytics-providers/custom.html b/_archive_jekyll/_includes/analytics-providers/custom.html 
similarity index 100% rename from _includes/analytics-providers/custom.html rename to _archive_jekyll/_includes/analytics-providers/custom.html diff --git a/_includes/analytics-providers/google-gtag.html b/_archive_jekyll/_includes/analytics-providers/google-gtag.html similarity index 100% rename from _includes/analytics-providers/google-gtag.html rename to _archive_jekyll/_includes/analytics-providers/google-gtag.html diff --git a/_includes/analytics-providers/google-universal.html b/_archive_jekyll/_includes/analytics-providers/google-universal.html similarity index 100% rename from _includes/analytics-providers/google-universal.html rename to _archive_jekyll/_includes/analytics-providers/google-universal.html diff --git a/_includes/analytics-providers/google.html b/_archive_jekyll/_includes/analytics-providers/google.html similarity index 100% rename from _includes/analytics-providers/google.html rename to _archive_jekyll/_includes/analytics-providers/google.html diff --git a/_includes/analytics.html b/_archive_jekyll/_includes/analytics.html similarity index 100% rename from _includes/analytics.html rename to _archive_jekyll/_includes/analytics.html diff --git a/_includes/archive-single.html b/_archive_jekyll/_includes/archive-single.html similarity index 100% rename from _includes/archive-single.html rename to _archive_jekyll/_includes/archive-single.html diff --git a/_includes/author-profile-custom-links.html b/_archive_jekyll/_includes/author-profile-custom-links.html similarity index 100% rename from _includes/author-profile-custom-links.html rename to _archive_jekyll/_includes/author-profile-custom-links.html diff --git a/_includes/author-profile.html b/_archive_jekyll/_includes/author-profile.html similarity index 100% rename from _includes/author-profile.html rename to _archive_jekyll/_includes/author-profile.html diff --git a/_includes/breadcrumbs.html b/_archive_jekyll/_includes/breadcrumbs.html similarity index 100% rename from 
_includes/breadcrumbs.html rename to _archive_jekyll/_includes/breadcrumbs.html diff --git a/_includes/category-list.html b/_archive_jekyll/_includes/category-list.html similarity index 100% rename from _includes/category-list.html rename to _archive_jekyll/_includes/category-list.html diff --git a/_includes/comment.html b/_archive_jekyll/_includes/comment.html similarity index 100% rename from _includes/comment.html rename to _archive_jekyll/_includes/comment.html diff --git a/_includes/comments-providers/custom.html b/_archive_jekyll/_includes/comments-providers/custom.html similarity index 100% rename from _includes/comments-providers/custom.html rename to _archive_jekyll/_includes/comments-providers/custom.html diff --git a/_includes/comments-providers/custom_scripts.html b/_archive_jekyll/_includes/comments-providers/custom_scripts.html similarity index 100% rename from _includes/comments-providers/custom_scripts.html rename to _archive_jekyll/_includes/comments-providers/custom_scripts.html diff --git a/_includes/comments-providers/discourse.html b/_archive_jekyll/_includes/comments-providers/discourse.html similarity index 100% rename from _includes/comments-providers/discourse.html rename to _archive_jekyll/_includes/comments-providers/discourse.html diff --git a/_includes/comments-providers/disqus.html b/_archive_jekyll/_includes/comments-providers/disqus.html similarity index 100% rename from _includes/comments-providers/disqus.html rename to _archive_jekyll/_includes/comments-providers/disqus.html diff --git a/_includes/comments-providers/facebook.html b/_archive_jekyll/_includes/comments-providers/facebook.html similarity index 100% rename from _includes/comments-providers/facebook.html rename to _archive_jekyll/_includes/comments-providers/facebook.html diff --git a/_includes/comments-providers/giscus.html b/_archive_jekyll/_includes/comments-providers/giscus.html similarity index 100% rename from _includes/comments-providers/giscus.html rename to 
_archive_jekyll/_includes/comments-providers/giscus.html diff --git a/_includes/comments-providers/scripts.html b/_archive_jekyll/_includes/comments-providers/scripts.html similarity index 100% rename from _includes/comments-providers/scripts.html rename to _archive_jekyll/_includes/comments-providers/scripts.html diff --git a/_includes/comments-providers/staticman.html b/_archive_jekyll/_includes/comments-providers/staticman.html similarity index 100% rename from _includes/comments-providers/staticman.html rename to _archive_jekyll/_includes/comments-providers/staticman.html diff --git a/_includes/comments-providers/staticman_v2.html b/_archive_jekyll/_includes/comments-providers/staticman_v2.html similarity index 100% rename from _includes/comments-providers/staticman_v2.html rename to _archive_jekyll/_includes/comments-providers/staticman_v2.html diff --git a/_includes/comments-providers/utterances.html b/_archive_jekyll/_includes/comments-providers/utterances.html similarity index 100% rename from _includes/comments-providers/utterances.html rename to _archive_jekyll/_includes/comments-providers/utterances.html diff --git a/_includes/comments.html b/_archive_jekyll/_includes/comments.html similarity index 100% rename from _includes/comments.html rename to _archive_jekyll/_includes/comments.html diff --git a/_includes/documents-collection.html b/_archive_jekyll/_includes/documents-collection.html similarity index 100% rename from _includes/documents-collection.html rename to _archive_jekyll/_includes/documents-collection.html diff --git a/_includes/feature_row b/_archive_jekyll/_includes/feature_row similarity index 100% rename from _includes/feature_row rename to _archive_jekyll/_includes/feature_row diff --git a/_includes/figure b/_archive_jekyll/_includes/figure similarity index 100% rename from _includes/figure rename to _archive_jekyll/_includes/figure diff --git a/_includes/footer.html b/_archive_jekyll/_includes/footer.html similarity index 100% rename 
from _includes/footer.html rename to _archive_jekyll/_includes/footer.html diff --git a/_includes/footer/custom.html b/_archive_jekyll/_includes/footer/custom.html similarity index 100% rename from _includes/footer/custom.html rename to _archive_jekyll/_includes/footer/custom.html diff --git a/_includes/gallery b/_archive_jekyll/_includes/gallery similarity index 100% rename from _includes/gallery rename to _archive_jekyll/_includes/gallery diff --git a/_includes/group-by-array b/_archive_jekyll/_includes/group-by-array similarity index 100% rename from _includes/group-by-array rename to _archive_jekyll/_includes/group-by-array diff --git a/_includes/head.html b/_archive_jekyll/_includes/head.html similarity index 100% rename from _includes/head.html rename to _archive_jekyll/_includes/head.html diff --git a/_includes/head/custom.html b/_archive_jekyll/_includes/head/custom.html similarity index 100% rename from _includes/head/custom.html rename to _archive_jekyll/_includes/head/custom.html diff --git a/_includes/masthead.html b/_archive_jekyll/_includes/masthead.html similarity index 100% rename from _includes/masthead.html rename to _archive_jekyll/_includes/masthead.html diff --git a/_includes/mermaid.html b/_archive_jekyll/_includes/mermaid.html similarity index 100% rename from _includes/mermaid.html rename to _archive_jekyll/_includes/mermaid.html diff --git a/_includes/nav_list b/_archive_jekyll/_includes/nav_list similarity index 100% rename from _includes/nav_list rename to _archive_jekyll/_includes/nav_list diff --git a/_includes/page__date.html b/_archive_jekyll/_includes/page__date.html similarity index 100% rename from _includes/page__date.html rename to _archive_jekyll/_includes/page__date.html diff --git a/_includes/page__hero.html b/_archive_jekyll/_includes/page__hero.html similarity index 100% rename from _includes/page__hero.html rename to _archive_jekyll/_includes/page__hero.html diff --git a/_includes/page__hero_video.html 
b/_archive_jekyll/_includes/page__hero_video.html similarity index 100% rename from _includes/page__hero_video.html rename to _archive_jekyll/_includes/page__hero_video.html diff --git a/_includes/page__meta.html b/_archive_jekyll/_includes/page__meta.html similarity index 100% rename from _includes/page__meta.html rename to _archive_jekyll/_includes/page__meta.html diff --git a/_includes/page__taxonomy.html b/_archive_jekyll/_includes/page__taxonomy.html similarity index 100% rename from _includes/page__taxonomy.html rename to _archive_jekyll/_includes/page__taxonomy.html diff --git a/_includes/paginator.html b/_archive_jekyll/_includes/paginator.html similarity index 100% rename from _includes/paginator.html rename to _archive_jekyll/_includes/paginator.html diff --git a/_includes/post_pagination.html b/_archive_jekyll/_includes/post_pagination.html similarity index 100% rename from _includes/post_pagination.html rename to _archive_jekyll/_includes/post_pagination.html diff --git a/_includes/posts-category.html b/_archive_jekyll/_includes/posts-category.html similarity index 100% rename from _includes/posts-category.html rename to _archive_jekyll/_includes/posts-category.html diff --git a/_includes/posts-tag.html b/_archive_jekyll/_includes/posts-tag.html similarity index 100% rename from _includes/posts-tag.html rename to _archive_jekyll/_includes/posts-tag.html diff --git a/_includes/qrcode.html b/_archive_jekyll/_includes/qrcode.html similarity index 100% rename from _includes/qrcode.html rename to _archive_jekyll/_includes/qrcode.html diff --git a/_includes/scripts.html b/_archive_jekyll/_includes/scripts.html similarity index 100% rename from _includes/scripts.html rename to _archive_jekyll/_includes/scripts.html diff --git a/_includes/search/algolia-search-scripts.html b/_archive_jekyll/_includes/search/algolia-search-scripts.html similarity index 100% rename from _includes/search/algolia-search-scripts.html rename to 
_archive_jekyll/_includes/search/algolia-search-scripts.html diff --git a/_includes/search/google-search-scripts.html b/_archive_jekyll/_includes/search/google-search-scripts.html similarity index 100% rename from _includes/search/google-search-scripts.html rename to _archive_jekyll/_includes/search/google-search-scripts.html diff --git a/_includes/search/lunr-search-scripts.html b/_archive_jekyll/_includes/search/lunr-search-scripts.html similarity index 100% rename from _includes/search/lunr-search-scripts.html rename to _archive_jekyll/_includes/search/lunr-search-scripts.html diff --git a/_includes/search/search_form.html b/_archive_jekyll/_includes/search/search_form.html similarity index 100% rename from _includes/search/search_form.html rename to _archive_jekyll/_includes/search/search_form.html diff --git a/_includes/seo.html b/_archive_jekyll/_includes/seo.html similarity index 100% rename from _includes/seo.html rename to _archive_jekyll/_includes/seo.html diff --git a/_includes/sidebar.html b/_archive_jekyll/_includes/sidebar.html similarity index 100% rename from _includes/sidebar.html rename to _archive_jekyll/_includes/sidebar.html diff --git a/_includes/skip-links.html b/_archive_jekyll/_includes/skip-links.html similarity index 100% rename from _includes/skip-links.html rename to _archive_jekyll/_includes/skip-links.html diff --git a/_includes/social-share.html b/_archive_jekyll/_includes/social-share.html similarity index 100% rename from _includes/social-share.html rename to _archive_jekyll/_includes/social-share.html diff --git a/_includes/tag-list.html b/_archive_jekyll/_includes/tag-list.html similarity index 100% rename from _includes/tag-list.html rename to _archive_jekyll/_includes/tag-list.html diff --git a/_includes/toc b/_archive_jekyll/_includes/toc similarity index 100% rename from _includes/toc rename to _archive_jekyll/_includes/toc diff --git a/_includes/toc.html b/_archive_jekyll/_includes/toc.html similarity index 100% rename from 
_includes/toc.html rename to _archive_jekyll/_includes/toc.html diff --git a/_includes/video b/_archive_jekyll/_includes/video similarity index 100% rename from _includes/video rename to _archive_jekyll/_includes/video diff --git a/_layouts/archive-taxonomy.html b/_archive_jekyll/_layouts/archive-taxonomy.html similarity index 100% rename from _layouts/archive-taxonomy.html rename to _archive_jekyll/_layouts/archive-taxonomy.html diff --git a/_layouts/archive.html b/_archive_jekyll/_layouts/archive.html similarity index 100% rename from _layouts/archive.html rename to _archive_jekyll/_layouts/archive.html diff --git a/_layouts/categories.html b/_archive_jekyll/_layouts/categories.html similarity index 100% rename from _layouts/categories.html rename to _archive_jekyll/_layouts/categories.html diff --git a/_layouts/category.html b/_archive_jekyll/_layouts/category.html similarity index 100% rename from _layouts/category.html rename to _archive_jekyll/_layouts/category.html diff --git a/_layouts/collection.html b/_archive_jekyll/_layouts/collection.html similarity index 100% rename from _layouts/collection.html rename to _archive_jekyll/_layouts/collection.html diff --git a/_layouts/compress.html b/_archive_jekyll/_layouts/compress.html similarity index 100% rename from _layouts/compress.html rename to _archive_jekyll/_layouts/compress.html diff --git a/_layouts/default.html b/_archive_jekyll/_layouts/default.html similarity index 100% rename from _layouts/default.html rename to _archive_jekyll/_layouts/default.html diff --git a/_layouts/home.html b/_archive_jekyll/_layouts/home.html similarity index 100% rename from _layouts/home.html rename to _archive_jekyll/_layouts/home.html diff --git a/_layouts/posts.html b/_archive_jekyll/_layouts/posts.html similarity index 100% rename from _layouts/posts.html rename to _archive_jekyll/_layouts/posts.html diff --git a/_layouts/search.html b/_archive_jekyll/_layouts/search.html similarity index 100% rename from 
_layouts/search.html rename to _archive_jekyll/_layouts/search.html diff --git a/_layouts/single.html b/_archive_jekyll/_layouts/single.html similarity index 100% rename from _layouts/single.html rename to _archive_jekyll/_layouts/single.html diff --git a/_layouts/splash.html b/_archive_jekyll/_layouts/splash.html similarity index 100% rename from _layouts/splash.html rename to _archive_jekyll/_layouts/splash.html diff --git a/_layouts/tag.html b/_archive_jekyll/_layouts/tag.html similarity index 100% rename from _layouts/tag.html rename to _archive_jekyll/_layouts/tag.html diff --git a/_layouts/tags.html b/_archive_jekyll/_layouts/tags.html similarity index 100% rename from _layouts/tags.html rename to _archive_jekyll/_layouts/tags.html diff --git a/_pages/tag-archive.md b/_archive_jekyll/_pages/tag-archive.md similarity index 100% rename from _pages/tag-archive.md rename to _archive_jekyll/_pages/tag-archive.md diff --git a/_pages/year-archive.md b/_archive_jekyll/_pages/year-archive.md similarity index 100% rename from _pages/year-archive.md rename to _archive_jekyll/_pages/year-archive.md diff --git a/_posts/2018/2018-05-03-setting-up-github-pages-with-custom-domain-over-https.md b/_archive_jekyll/_posts/2018/2018-05-03-setting-up-github-pages-with-custom-domain-over-https.md similarity index 100% rename from _posts/2018/2018-05-03-setting-up-github-pages-with-custom-domain-over-https.md rename to _archive_jekyll/_posts/2018/2018-05-03-setting-up-github-pages-with-custom-domain-over-https.md diff --git a/_posts/2018/2018-05-07-setting-up-powershell-gallery-and-nuget-gallery-for-powershell.md b/_archive_jekyll/_posts/2018/2018-05-07-setting-up-powershell-gallery-and-nuget-gallery-for-powershell.md similarity index 100% rename from _posts/2018/2018-05-07-setting-up-powershell-gallery-and-nuget-gallery-for-powershell.md rename to _archive_jekyll/_posts/2018/2018-05-07-setting-up-powershell-gallery-and-nuget-gallery-for-powershell.md diff --git 
a/_posts/2018/2018-05-16-powershell-stop-parsing.md b/_archive_jekyll/_posts/2018/2018-05-16-powershell-stop-parsing.md similarity index 100% rename from _posts/2018/2018-05-16-powershell-stop-parsing.md rename to _archive_jekyll/_posts/2018/2018-05-16-powershell-stop-parsing.md diff --git a/_posts/2018/2018-05-19-setting-up-jekyll-with-minimal-mistakes-theme-on-windows.md b/_archive_jekyll/_posts/2018/2018-05-19-setting-up-jekyll-with-minimal-mistakes-theme-on-windows.md similarity index 100% rename from _posts/2018/2018-05-19-setting-up-jekyll-with-minimal-mistakes-theme-on-windows.md rename to _archive_jekyll/_posts/2018/2018-05-19-setting-up-jekyll-with-minimal-mistakes-theme-on-windows.md diff --git a/_posts/2018/2018-05-22-using-readline-in-python-repl-on-windows.md b/_archive_jekyll/_posts/2018/2018-05-22-using-readline-in-python-repl-on-windows.md similarity index 100% rename from _posts/2018/2018-05-22-using-readline-in-python-repl-on-windows.md rename to _archive_jekyll/_posts/2018/2018-05-22-using-readline-in-python-repl-on-windows.md diff --git a/_posts/2018/2018-05-26-grep-like-powershell-colorful-select-string.md b/_archive_jekyll/_posts/2018/2018-05-26-grep-like-powershell-colorful-select-string.md similarity index 100% rename from _posts/2018/2018-05-26-grep-like-powershell-colorful-select-string.md rename to _archive_jekyll/_posts/2018/2018-05-26-grep-like-powershell-colorful-select-string.md diff --git a/_posts/2018/2018-06-03-converting-python-json-list-to-csv-in-2-lines-of-code-by-pandas.md b/_archive_jekyll/_posts/2018/2018-06-03-converting-python-json-list-to-csv-in-2-lines-of-code-by-pandas.md similarity index 100% rename from _posts/2018/2018-06-03-converting-python-json-list-to-csv-in-2-lines-of-code-by-pandas.md rename to _archive_jekyll/_posts/2018/2018-06-03-converting-python-json-list-to-csv-in-2-lines-of-code-by-pandas.md diff --git a/_posts/2018/2018-06-21-import-python-module-with-sys-path-when-without-init-file.md 
b/_archive_jekyll/_posts/2018/2018-06-21-import-python-module-with-sys-path-when-without-init-file.md similarity index 100% rename from _posts/2018/2018-06-21-import-python-module-with-sys-path-when-without-init-file.md rename to _archive_jekyll/_posts/2018/2018-06-21-import-python-module-with-sys-path-when-without-init-file.md diff --git a/_posts/2018/2018-06-22-git-untrack-submodule-from-git-status.md b/_archive_jekyll/_posts/2018/2018-06-22-git-untrack-submodule-from-git-status.md similarity index 100% rename from _posts/2018/2018-06-22-git-untrack-submodule-from-git-status.md rename to _archive_jekyll/_posts/2018/2018-06-22-git-untrack-submodule-from-git-status.md diff --git a/_posts/2018/2018-06-26-install-python-on-windows-with-powershell-without-administrator-privileges.md b/_archive_jekyll/_posts/2018/2018-06-26-install-python-on-windows-with-powershell-without-administrator-privileges.md similarity index 100% rename from _posts/2018/2018-06-26-install-python-on-windows-with-powershell-without-administrator-privileges.md rename to _archive_jekyll/_posts/2018/2018-06-26-install-python-on-windows-with-powershell-without-administrator-privileges.md diff --git a/_posts/2018/2018-07-25-use-pyvmomi-EventHistoryCollector-to-get-all-the-vcenter-events.md b/_archive_jekyll/_posts/2018/2018-07-25-use-pyvmomi-EventHistoryCollector-to-get-all-the-vcenter-events.md similarity index 100% rename from _posts/2018/2018-07-25-use-pyvmomi-EventHistoryCollector-to-get-all-the-vcenter-events.md rename to _archive_jekyll/_posts/2018/2018-07-25-use-pyvmomi-EventHistoryCollector-to-get-all-the-vcenter-events.md diff --git a/_posts/2018/2018-07-28-use-python-tabulate-module-to-create-tables.md b/_archive_jekyll/_posts/2018/2018-07-28-use-python-tabulate-module-to-create-tables.md similarity index 100% rename from _posts/2018/2018-07-28-use-python-tabulate-module-to-create-tables.md rename to _archive_jekyll/_posts/2018/2018-07-28-use-python-tabulate-module-to-create-tables.md diff 
--git a/_posts/2018/2018-07-29-convert-markdown-or-rst-to-atlassian-confluance-documentation-format.md b/_archive_jekyll/_posts/2018/2018-07-29-convert-markdown-or-rst-to-atlassian-confluance-documentation-format.md similarity index 100% rename from _posts/2018/2018-07-29-convert-markdown-or-rst-to-atlassian-confluance-documentation-format.md rename to _archive_jekyll/_posts/2018/2018-07-29-convert-markdown-or-rst-to-atlassian-confluance-documentation-format.md diff --git a/_posts/2018/2018-09-05-windows-scheduled-task-by-powershell.md b/_archive_jekyll/_posts/2018/2018-09-05-windows-scheduled-task-by-powershell.md similarity index 100% rename from _posts/2018/2018-09-05-windows-scheduled-task-by-powershell.md rename to _archive_jekyll/_posts/2018/2018-09-05-windows-scheduled-task-by-powershell.md diff --git a/_posts/2018/2018-09-06-install-gitlab-ce-in-docker-on-ubuntu.md b/_archive_jekyll/_posts/2018/2018-09-06-install-gitlab-ce-in-docker-on-ubuntu.md similarity index 100% rename from _posts/2018/2018-09-06-install-gitlab-ce-in-docker-on-ubuntu.md rename to _archive_jekyll/_posts/2018/2018-09-06-install-gitlab-ce-in-docker-on-ubuntu.md diff --git a/_posts/2018/2018-09-16-setup-https-for-gitlab.md b/_archive_jekyll/_posts/2018/2018-09-16-setup-https-for-gitlab.md similarity index 100% rename from _posts/2018/2018-09-16-setup-https-for-gitlab.md rename to _archive_jekyll/_posts/2018/2018-09-16-setup-https-for-gitlab.md diff --git a/_posts/2018/2018-09-20-install-gitlab-runner-on-windows-by-powershell-psremoting.md b/_archive_jekyll/_posts/2018/2018-09-20-install-gitlab-runner-on-windows-by-powershell-psremoting.md similarity index 100% rename from _posts/2018/2018-09-20-install-gitlab-runner-on-windows-by-powershell-psremoting.md rename to _archive_jekyll/_posts/2018/2018-09-20-install-gitlab-runner-on-windows-by-powershell-psremoting.md diff --git a/_posts/2018/2018-09-24-backup-and-restore-gitlab-in-docker.md 
b/_archive_jekyll/_posts/2018/2018-09-24-backup-and-restore-gitlab-in-docker.md similarity index 100% rename from _posts/2018/2018-09-24-backup-and-restore-gitlab-in-docker.md rename to _archive_jekyll/_posts/2018/2018-09-24-backup-and-restore-gitlab-in-docker.md diff --git a/_posts/2018/2018-09-28-terminate-powershell-script-or-session.md b/_archive_jekyll/_posts/2018/2018-09-28-terminate-powershell-script-or-session.md similarity index 100% rename from _posts/2018/2018-09-28-terminate-powershell-script-or-session.md rename to _archive_jekyll/_posts/2018/2018-09-28-terminate-powershell-script-or-session.md diff --git a/_posts/2018/2018-10-03-update-gitlab-in-docker.md b/_archive_jekyll/_posts/2018/2018-10-03-update-gitlab-in-docker.md similarity index 100% rename from _posts/2018/2018-10-03-update-gitlab-in-docker.md rename to _archive_jekyll/_posts/2018/2018-10-03-update-gitlab-in-docker.md diff --git a/_posts/2018/2018-10-10-migrate-gitlab-in-docker.md b/_archive_jekyll/_posts/2018/2018-10-10-migrate-gitlab-in-docker.md similarity index 100% rename from _posts/2018/2018-10-10-migrate-gitlab-in-docker.md rename to _archive_jekyll/_posts/2018/2018-10-10-migrate-gitlab-in-docker.md diff --git a/_posts/2018/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows.md b/_archive_jekyll/_posts/2018/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows.md similarity index 100% rename from _posts/2018/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows.md rename to _archive_jekyll/_posts/2018/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows.md diff --git a/_posts/2018/2018-11-01-setting-pwsh-invoke-webrequest-proxy.md b/_archive_jekyll/_posts/2018/2018-11-01-setting-pwsh-invoke-webrequest-proxy.md similarity index 100% rename from _posts/2018/2018-11-01-setting-pwsh-invoke-webrequest-proxy.md rename to _archive_jekyll/_posts/2018/2018-11-01-setting-pwsh-invoke-webrequest-proxy.md diff --git 
a/_posts/2018/2018-11-05-creating-multiple-redis-instance-services-on-windows.md b/_archive_jekyll/_posts/2018/2018-11-05-creating-multiple-redis-instance-services-on-windows.md similarity index 100% rename from _posts/2018/2018-11-05-creating-multiple-redis-instance-services-on-windows.md rename to _archive_jekyll/_posts/2018/2018-11-05-creating-multiple-redis-instance-services-on-windows.md diff --git a/_posts/2019/2019-04-01-creating-custom-python-request-auth-class.md b/_archive_jekyll/_posts/2019/2019-04-01-creating-custom-python-request-auth-class.md similarity index 100% rename from _posts/2019/2019-04-01-creating-custom-python-request-auth-class.md rename to _archive_jekyll/_posts/2019/2019-04-01-creating-custom-python-request-auth-class.md diff --git a/_posts/2019/2019-05-14-using-python-sqlalchemy-session-in-multithreading.md b/_archive_jekyll/_posts/2019/2019-05-14-using-python-sqlalchemy-session-in-multithreading.md similarity index 100% rename from _posts/2019/2019-05-14-using-python-sqlalchemy-session-in-multithreading.md rename to _archive_jekyll/_posts/2019/2019-05-14-using-python-sqlalchemy-session-in-multithreading.md diff --git a/_posts/2019/2019-06-19-git-cheat-sheet.md b/_archive_jekyll/_posts/2019/2019-06-19-git-cheat-sheet.md similarity index 100% rename from _posts/2019/2019-06-19-git-cheat-sheet.md rename to _archive_jekyll/_posts/2019/2019-06-19-git-cheat-sheet.md diff --git a/_posts/2019/2019-07-07-installing-readthedocs-on-ubuntu-1804.md b/_archive_jekyll/_posts/2019/2019-07-07-installing-readthedocs-on-ubuntu-1804.md similarity index 100% rename from _posts/2019/2019-07-07-installing-readthedocs-on-ubuntu-1804.md rename to _archive_jekyll/_posts/2019/2019-07-07-installing-readthedocs-on-ubuntu-1804.md diff --git a/_posts/2019/2019-07-13-filtering-pandas-dataframe.md b/_archive_jekyll/_posts/2019/2019-07-13-filtering-pandas-dataframe.md similarity index 100% rename from _posts/2019/2019-07-13-filtering-pandas-dataframe.md rename to 
_archive_jekyll/_posts/2019/2019-07-13-filtering-pandas-dataframe.md diff --git a/_posts/2019/2019-07-21-learning-flask.md b/_archive_jekyll/_posts/2019/2019-07-21-learning-flask.md similarity index 100% rename from _posts/2019/2019-07-21-learning-flask.md rename to _archive_jekyll/_posts/2019/2019-07-21-learning-flask.md diff --git a/_posts/2019/2019-07-30-troubleshooting-python-twine-cannot-upload-package-on-windows.md b/_archive_jekyll/_posts/2019/2019-07-30-troubleshooting-python-twine-cannot-upload-package-on-windows.md similarity index 100% rename from _posts/2019/2019-07-30-troubleshooting-python-twine-cannot-upload-package-on-windows.md rename to _archive_jekyll/_posts/2019/2019-07-30-troubleshooting-python-twine-cannot-upload-package-on-windows.md diff --git a/_posts/2019/2019-09-14-fast-tcp-port-check-in-powershell.md b/_archive_jekyll/_posts/2019/2019-09-14-fast-tcp-port-check-in-powershell.md similarity index 100% rename from _posts/2019/2019-09-14-fast-tcp-port-check-in-powershell.md rename to _archive_jekyll/_posts/2019/2019-09-14-fast-tcp-port-check-in-powershell.md diff --git a/_posts/2019/2019-09-25-sqlalchemy-mixin-in-method.md b/_archive_jekyll/_posts/2019/2019-09-25-sqlalchemy-mixin-in-method.md similarity index 100% rename from _posts/2019/2019-09-25-sqlalchemy-mixin-in-method.md rename to _archive_jekyll/_posts/2019/2019-09-25-sqlalchemy-mixin-in-method.md diff --git a/_posts/2019/2019-10-27-installing-python3-on-ubuntu.md b/_archive_jekyll/_posts/2019/2019-10-27-installing-python3-on-ubuntu.md similarity index 100% rename from _posts/2019/2019-10-27-installing-python3-on-ubuntu.md rename to _archive_jekyll/_posts/2019/2019-10-27-installing-python3-on-ubuntu.md diff --git a/_posts/2019/2019-12-21-elastic-painless-scripted-field-on-null-or-mssing-value.md b/_archive_jekyll/_posts/2019/2019-12-21-elastic-painless-scripted-field-on-null-or-mssing-value.md similarity index 100% rename from 
_posts/2019/2019-12-21-elastic-painless-scripted-field-on-null-or-mssing-value.md rename to _archive_jekyll/_posts/2019/2019-12-21-elastic-painless-scripted-field-on-null-or-mssing-value.md diff --git a/_posts/2019/2019-12-29-Using-Powershell-to-retrieve-latest-package-url-from-github-releases.md b/_archive_jekyll/_posts/2019/2019-12-29-Using-Powershell-to-retrieve-latest-package-url-from-github-releases.md similarity index 100% rename from _posts/2019/2019-12-29-Using-Powershell-to-retrieve-latest-package-url-from-github-releases.md rename to _archive_jekyll/_posts/2019/2019-12-29-Using-Powershell-to-retrieve-latest-package-url-from-github-releases.md diff --git a/_posts/2019/2019-12-29-Using-Scoop-On-Windows.md b/_archive_jekyll/_posts/2019/2019-12-29-Using-Scoop-On-Windows.md similarity index 100% rename from _posts/2019/2019-12-29-Using-Scoop-On-Windows.md rename to _archive_jekyll/_posts/2019/2019-12-29-Using-Scoop-On-Windows.md diff --git a/_posts/2020/2020-02-01-setting-up-wsl.md b/_archive_jekyll/_posts/2020/2020-02-01-setting-up-wsl.md similarity index 100% rename from _posts/2020/2020-02-01-setting-up-wsl.md rename to _archive_jekyll/_posts/2020/2020-02-01-setting-up-wsl.md diff --git a/_posts/2020/2020-03-09-flattening-nested-dict-in-python.md b/_archive_jekyll/_posts/2020/2020-03-09-flattening-nested-dict-in-python.md similarity index 100% rename from _posts/2020/2020-03-09-flattening-nested-dict-in-python.md rename to _archive_jekyll/_posts/2020/2020-03-09-flattening-nested-dict-in-python.md diff --git a/_posts/2020/2020-04-13-fixing-ipython-on-Windows10-ConEmu-mouse-event-bug.md b/_archive_jekyll/_posts/2020/2020-04-13-fixing-ipython-on-Windows10-ConEmu-mouse-event-bug.md similarity index 100% rename from _posts/2020/2020-04-13-fixing-ipython-on-Windows10-ConEmu-mouse-event-bug.md rename to _archive_jekyll/_posts/2020/2020-04-13-fixing-ipython-on-Windows10-ConEmu-mouse-event-bug.md diff --git 
a/_posts/2020/2020-04-13-making-isort-compatible-with-black.md b/_archive_jekyll/_posts/2020/2020-04-13-making-isort-compatible-with-black.md similarity index 100% rename from _posts/2020/2020-04-13-making-isort-compatible-with-black.md rename to _archive_jekyll/_posts/2020/2020-04-13-making-isort-compatible-with-black.md diff --git a/_posts/2020/2020-05-05-using-python-contextmanager-to-create-a-timer-decorator.md b/_archive_jekyll/_posts/2020/2020-05-05-using-python-contextmanager-to-create-a-timer-decorator.md similarity index 100% rename from _posts/2020/2020-05-05-using-python-contextmanager-to-create-a-timer-decorator.md rename to _archive_jekyll/_posts/2020/2020-05-05-using-python-contextmanager-to-create-a-timer-decorator.md diff --git a/_posts/2020/2020-06-08-compiling-sqlalchemy-query-to-nearly-real-raw-sql-query.md b/_archive_jekyll/_posts/2020/2020-06-08-compiling-sqlalchemy-query-to-nearly-real-raw-sql-query.md similarity index 100% rename from _posts/2020/2020-06-08-compiling-sqlalchemy-query-to-nearly-real-raw-sql-query.md rename to _archive_jekyll/_posts/2020/2020-06-08-compiling-sqlalchemy-query-to-nearly-real-raw-sql-query.md diff --git a/_posts/2020/2020-07-16-rolling-back-from-flask-restplus-reqparse-to-native-flask-request-to-parse-inputs.md b/_archive_jekyll/_posts/2020/2020-07-16-rolling-back-from-flask-restplus-reqparse-to-native-flask-request-to-parse-inputs.md similarity index 100% rename from _posts/2020/2020-07-16-rolling-back-from-flask-restplus-reqparse-to-native-flask-request-to-parse-inputs.md rename to _archive_jekyll/_posts/2020/2020-07-16-rolling-back-from-flask-restplus-reqparse-to-native-flask-request-to-parse-inputs.md diff --git a/_posts/2020/2020-11-24-my-powerline.md b/_archive_jekyll/_posts/2020/2020-11-24-my-powerline.md similarity index 100% rename from _posts/2020/2020-11-24-my-powerline.md rename to _archive_jekyll/_posts/2020/2020-11-24-my-powerline.md diff --git a/_posts/2021/2021-01-04-python-lint-and-format.md 
b/_archive_jekyll/_posts/2021/2021-01-04-python-lint-and-format.md similarity index 100% rename from _posts/2021/2021-01-04-python-lint-and-format.md rename to _archive_jekyll/_posts/2021/2021-01-04-python-lint-and-format.md diff --git a/_posts/2021/2021-01-22-python-requests-with-retry.md b/_archive_jekyll/_posts/2021/2021-01-22-python-requests-with-retry.md similarity index 100% rename from _posts/2021/2021-01-22-python-requests-with-retry.md rename to _archive_jekyll/_posts/2021/2021-01-22-python-requests-with-retry.md diff --git a/_posts/2021/2021-03-06-trying-python-pipreqs-and-pip-tools.md b/_archive_jekyll/_posts/2021/2021-03-06-trying-python-pipreqs-and-pip-tools.md similarity index 100% rename from _posts/2021/2021-03-06-trying-python-pipreqs-and-pip-tools.md rename to _archive_jekyll/_posts/2021/2021-03-06-trying-python-pipreqs-and-pip-tools.md diff --git a/_posts/2021/2021-06-12-python-unittest-cheet-sheet.md b/_archive_jekyll/_posts/2021/2021-06-12-python-unittest-cheet-sheet.md similarity index 100% rename from _posts/2021/2021-06-12-python-unittest-cheet-sheet.md rename to _archive_jekyll/_posts/2021/2021-06-12-python-unittest-cheet-sheet.md diff --git a/_posts/2021/2021-06-27-python-datetime-utc-now.md b/_archive_jekyll/_posts/2021/2021-06-27-python-datetime-utc-now.md similarity index 100% rename from _posts/2021/2021-06-27-python-datetime-utc-now.md rename to _archive_jekyll/_posts/2021/2021-06-27-python-datetime-utc-now.md diff --git a/_posts/2021/2021-09-04-python-asyncio.md b/_archive_jekyll/_posts/2021/2021-09-04-python-asyncio.md similarity index 100% rename from _posts/2021/2021-09-04-python-asyncio.md rename to _archive_jekyll/_posts/2021/2021-09-04-python-asyncio.md diff --git a/_posts/2022/2022-01-22-azure-pipeline-predefined-variables.md b/_archive_jekyll/_posts/2022/2022-01-22-azure-pipeline-predefined-variables.md similarity index 100% rename from _posts/2022/2022-01-22-azure-pipeline-predefined-variables.md rename to 
_archive_jekyll/_posts/2022/2022-01-22-azure-pipeline-predefined-variables.md diff --git a/_posts/2022/2022-02-09-azure-pipeline-reuse-variables-in-template-from-another-repository.md b/_archive_jekyll/_posts/2022/2022-02-09-azure-pipeline-reuse-variables-in-template-from-another-repository.md similarity index 100% rename from _posts/2022/2022-02-09-azure-pipeline-reuse-variables-in-template-from-another-repository.md rename to _archive_jekyll/_posts/2022/2022-02-09-azure-pipeline-reuse-variables-in-template-from-another-repository.md diff --git a/_posts/2022/2022-02-19-azure-pipeline-checkout-repository-from-another-project.md b/_archive_jekyll/_posts/2022/2022-02-19-azure-pipeline-checkout-repository-from-another-project.md similarity index 100% rename from _posts/2022/2022-02-19-azure-pipeline-checkout-repository-from-another-project.md rename to _archive_jekyll/_posts/2022/2022-02-19-azure-pipeline-checkout-repository-from-another-project.md diff --git a/_posts/2022/2022-03-09-azure-pipeline-variables-and-parameters.md b/_archive_jekyll/_posts/2022/2022-03-09-azure-pipeline-variables-and-parameters.md similarity index 100% rename from _posts/2022/2022-03-09-azure-pipeline-variables-and-parameters.md rename to _archive_jekyll/_posts/2022/2022-03-09-azure-pipeline-variables-and-parameters.md diff --git a/_posts/2022/2022-03-27-manage-azure-databricks-service-principal.md b/_archive_jekyll/_posts/2022/2022-03-27-manage-azure-databricks-service-principal.md similarity index 100% rename from _posts/2022/2022-03-27-manage-azure-databricks-service-principal.md rename to _archive_jekyll/_posts/2022/2022-03-27-manage-azure-databricks-service-principal.md diff --git a/_posts/2022/2022-04-03-azure-pipeline-checkout-multiple-repositories.md b/_archive_jekyll/_posts/2022/2022-04-03-azure-pipeline-checkout-multiple-repositories.md similarity index 100% rename from _posts/2022/2022-04-03-azure-pipeline-checkout-multiple-repositories.md rename to 
_archive_jekyll/_posts/2022/2022-04-03-azure-pipeline-checkout-multiple-repositories.md diff --git a/_posts/2022/2022-06-08-using-databricks-connect-inside-a-container.md b/_archive_jekyll/_posts/2022/2022-06-08-using-databricks-connect-inside-a-container.md similarity index 100% rename from _posts/2022/2022-06-08-using-databricks-connect-inside-a-container.md rename to _archive_jekyll/_posts/2022/2022-06-08-using-databricks-connect-inside-a-container.md diff --git a/_posts/2022/2022-07-03-azure-pipeline-conditions.md b/_archive_jekyll/_posts/2022/2022-07-03-azure-pipeline-conditions.md similarity index 100% rename from _posts/2022/2022-07-03-azure-pipeline-conditions.md rename to _archive_jekyll/_posts/2022/2022-07-03-azure-pipeline-conditions.md diff --git a/_posts/2022/2022-07-28-databricks-job-context.md b/_archive_jekyll/_posts/2022/2022-07-28-databricks-job-context.md similarity index 100% rename from _posts/2022/2022-07-28-databricks-job-context.md rename to _archive_jekyll/_posts/2022/2022-07-28-databricks-job-context.md diff --git a/_posts/2022/2022-08-14-azure-pipeline-jobs.md b/_archive_jekyll/_posts/2022/2022-08-14-azure-pipeline-jobs.md similarity index 100% rename from _posts/2022/2022-08-14-azure-pipeline-jobs.md rename to _archive_jekyll/_posts/2022/2022-08-14-azure-pipeline-jobs.md diff --git a/_posts/2022/2022-09-12-azure-pipeline-system-access-token-in-shared-pipeline.md b/_archive_jekyll/_posts/2022/2022-09-12-azure-pipeline-system-access-token-in-shared-pipeline.md similarity index 100% rename from _posts/2022/2022-09-12-azure-pipeline-system-access-token-in-shared-pipeline.md rename to _archive_jekyll/_posts/2022/2022-09-12-azure-pipeline-system-access-token-in-shared-pipeline.md diff --git a/_posts/2022/2022-09-15-adding-data-files-to-python-package-with-setup-py.md b/_archive_jekyll/_posts/2022/2022-09-15-adding-data-files-to-python-package-with-setup-py.md similarity index 100% rename from 
_posts/2022/2022-09-15-adding-data-files-to-python-package-with-setup-py.md rename to _archive_jekyll/_posts/2022/2022-09-15-adding-data-files-to-python-package-with-setup-py.md diff --git a/_posts/2022/2022-09-20-databricks-cluster-access-mode.md b/_archive_jekyll/_posts/2022/2022-09-20-databricks-cluster-access-mode.md similarity index 100% rename from _posts/2022/2022-09-20-databricks-cluster-access-mode.md rename to _archive_jekyll/_posts/2022/2022-09-20-databricks-cluster-access-mode.md diff --git a/_posts/2022/2022-11-09-azure-pipeline-delete-blobs-from-blob-storage.md b/_archive_jekyll/_posts/2022/2022-11-09-azure-pipeline-delete-blobs-from-blob-storage.md similarity index 100% rename from _posts/2022/2022-11-09-azure-pipeline-delete-blobs-from-blob-storage.md rename to _archive_jekyll/_posts/2022/2022-11-09-azure-pipeline-delete-blobs-from-blob-storage.md diff --git a/_posts/2022/2022-11-13-azure-pipeline-windows-agent-UnicodeEncodeError.md b/_archive_jekyll/_posts/2022/2022-11-13-azure-pipeline-windows-agent-UnicodeEncodeError.md similarity index 100% rename from _posts/2022/2022-11-13-azure-pipeline-windows-agent-UnicodeEncodeError.md rename to _archive_jekyll/_posts/2022/2022-11-13-azure-pipeline-windows-agent-UnicodeEncodeError.md diff --git a/_posts/2022/2022-11-15-using-ast-and-cst-to-change-python-code.md b/_archive_jekyll/_posts/2022/2022-11-15-using-ast-and-cst-to-change-python-code.md similarity index 100% rename from _posts/2022/2022-11-15-using-ast-and-cst-to-change-python-code.md rename to _archive_jekyll/_posts/2022/2022-11-15-using-ast-and-cst-to-change-python-code.md diff --git a/_posts/2022/2022-12-01-python-difference-on-subprocess-run-call-check-call-check-output.md b/_archive_jekyll/_posts/2022/2022-12-01-python-difference-on-subprocess-run-call-check-call-check-output.md similarity index 100% rename from _posts/2022/2022-12-01-python-difference-on-subprocess-run-call-check-call-check-output.md rename to 
_archive_jekyll/_posts/2022/2022-12-01-python-difference-on-subprocess-run-call-check-call-check-output.md diff --git a/_posts/2022/2022-12-03-syncing-repository-from-github-to-gitee.md b/_archive_jekyll/_posts/2022/2022-12-03-syncing-repository-from-github-to-gitee.md similarity index 100% rename from _posts/2022/2022-12-03-syncing-repository-from-github-to-gitee.md rename to _archive_jekyll/_posts/2022/2022-12-03-syncing-repository-from-github-to-gitee.md diff --git a/_posts/2022/2022-12-30-azure-pipeline-expressions.md b/_archive_jekyll/_posts/2022/2022-12-30-azure-pipeline-expressions.md similarity index 100% rename from _posts/2022/2022-12-30-azure-pipeline-expressions.md rename to _archive_jekyll/_posts/2022/2022-12-30-azure-pipeline-expressions.md diff --git a/_posts/2023/2023-01-04-python-aiohttp-rate-limit.md b/_archive_jekyll/_posts/2023/2023-01-04-python-aiohttp-rate-limit.md similarity index 100% rename from _posts/2023/2023-01-04-python-aiohttp-rate-limit.md rename to _archive_jekyll/_posts/2023/2023-01-04-python-aiohttp-rate-limit.md diff --git a/_posts/2023/2023-01-05-calling-azure-rest-api.md b/_archive_jekyll/_posts/2023/2023-01-05-calling-azure-rest-api.md similarity index 100% rename from _posts/2023/2023-01-05-calling-azure-rest-api.md rename to _archive_jekyll/_posts/2023/2023-01-05-calling-azure-rest-api.md diff --git a/_posts/2023/2023-01-28-sonarcloud-github-action.md b/_archive_jekyll/_posts/2023/2023-01-28-sonarcloud-github-action.md similarity index 100% rename from _posts/2023/2023-01-28-sonarcloud-github-action.md rename to _archive_jekyll/_posts/2023/2023-01-28-sonarcloud-github-action.md diff --git a/_posts/2023/2023-05-26-searching-azcli-packages-installation-path.md b/_archive_jekyll/_posts/2023/2023-05-26-searching-azcli-packages-installation-path.md similarity index 100% rename from _posts/2023/2023-05-26-searching-azcli-packages-installation-path.md rename to 
_archive_jekyll/_posts/2023/2023-05-26-searching-azcli-packages-installation-path.md diff --git a/_posts/2023/2023-07-04-python-asyncio-unittest.md b/_archive_jekyll/_posts/2023/2023-07-04-python-asyncio-unittest.md similarity index 100% rename from _posts/2023/2023-07-04-python-asyncio-unittest.md rename to _archive_jekyll/_posts/2023/2023-07-04-python-asyncio-unittest.md diff --git a/_posts/2023/2023-09-04-different-ssh-keys-for-different-github.com-accounts.md b/_archive_jekyll/_posts/2023/2023-09-04-different-ssh-keys-for-different-github.com-accounts.md similarity index 100% rename from _posts/2023/2023-09-04-different-ssh-keys-for-different-github.com-accounts.md rename to _archive_jekyll/_posts/2023/2023-09-04-different-ssh-keys-for-different-github.com-accounts.md diff --git a/_posts/2023/2023-09-14-python-asyncio.md b/_archive_jekyll/_posts/2023/2023-09-14-python-asyncio.md similarity index 100% rename from _posts/2023/2023-09-14-python-asyncio.md rename to _archive_jekyll/_posts/2023/2023-09-14-python-asyncio.md diff --git a/_posts/2023/2023-09-19-github-actions-cache.md b/_archive_jekyll/_posts/2023/2023-09-19-github-actions-cache.md similarity index 100% rename from _posts/2023/2023-09-19-github-actions-cache.md rename to _archive_jekyll/_posts/2023/2023-09-19-github-actions-cache.md diff --git a/_posts/2023/2023-09-19-github-actions-custom-actions.md b/_archive_jekyll/_posts/2023/2023-09-19-github-actions-custom-actions.md similarity index 100% rename from _posts/2023/2023-09-19-github-actions-custom-actions.md rename to _archive_jekyll/_posts/2023/2023-09-19-github-actions-custom-actions.md diff --git a/_posts/2023/2023-09-19-github-actions-environment.md b/_archive_jekyll/_posts/2023/2023-09-19-github-actions-environment.md similarity index 100% rename from _posts/2023/2023-09-19-github-actions-environment.md rename to _archive_jekyll/_posts/2023/2023-09-19-github-actions-environment.md diff --git a/_posts/2023/2023-09-19-github-actions-variables.md 
b/_archive_jekyll/_posts/2023/2023-09-19-github-actions-variables.md similarity index 100% rename from _posts/2023/2023-09-19-github-actions-variables.md rename to _archive_jekyll/_posts/2023/2023-09-19-github-actions-variables.md diff --git a/_posts/2023/2023-09-20-github-actions-error-handling.md b/_archive_jekyll/_posts/2023/2023-09-20-github-actions-error-handling.md similarity index 100% rename from _posts/2023/2023-09-20-github-actions-error-handling.md rename to _archive_jekyll/_posts/2023/2023-09-20-github-actions-error-handling.md diff --git a/_posts/2023/2023-09-21-github-actions-workflows.md b/_archive_jekyll/_posts/2023/2023-09-21-github-actions-workflows.md similarity index 100% rename from _posts/2023/2023-09-21-github-actions-workflows.md rename to _archive_jekyll/_posts/2023/2023-09-21-github-actions-workflows.md diff --git a/_posts/2023/2023-09-22-databricks-python-pip-authentication.md b/_archive_jekyll/_posts/2023/2023-09-22-databricks-python-pip-authentication.md similarity index 100% rename from _posts/2023/2023-09-22-databricks-python-pip-authentication.md rename to _archive_jekyll/_posts/2023/2023-09-22-databricks-python-pip-authentication.md diff --git a/_posts/2023/2023-09-22-github-actions-python.md b/_archive_jekyll/_posts/2023/2023-09-22-github-actions-python.md similarity index 100% rename from _posts/2023/2023-09-22-github-actions-python.md rename to _archive_jekyll/_posts/2023/2023-09-22-github-actions-python.md diff --git a/_posts/2023/2023-10-16-github-actions-get-azure-keyvault-secrets-action.md b/_archive_jekyll/_posts/2023/2023-10-16-github-actions-get-azure-keyvault-secrets-action.md similarity index 100% rename from _posts/2023/2023-10-16-github-actions-get-azure-keyvault-secrets-action.md rename to _archive_jekyll/_posts/2023/2023-10-16-github-actions-get-azure-keyvault-secrets-action.md diff --git a/_posts/2023/2023-10-21-hashing-files.md b/_archive_jekyll/_posts/2023/2023-10-21-hashing-files.md similarity index 100% rename 
from _posts/2023/2023-10-21-hashing-files.md rename to _archive_jekyll/_posts/2023/2023-10-21-hashing-files.md diff --git a/_posts/2023/2023-11-08-github-actions-bash-shell-pipefail.md b/_archive_jekyll/_posts/2023/2023-11-08-github-actions-bash-shell-pipefail.md similarity index 100% rename from _posts/2023/2023-11-08-github-actions-bash-shell-pipefail.md rename to _archive_jekyll/_posts/2023/2023-11-08-github-actions-bash-shell-pipefail.md diff --git a/_posts/2023/2023-11-18-Some-nice-CICD-bash-common-scripts.md b/_archive_jekyll/_posts/2023/2023-11-18-Some-nice-CICD-bash-common-scripts.md similarity index 100% rename from _posts/2023/2023-11-18-Some-nice-CICD-bash-common-scripts.md rename to _archive_jekyll/_posts/2023/2023-11-18-Some-nice-CICD-bash-common-scripts.md diff --git a/_posts/2023/2023-11-18-github-actions-deploy-static-files-to-azure-web-app.md b/_archive_jekyll/_posts/2023/2023-11-18-github-actions-deploy-static-files-to-azure-web-app.md similarity index 100% rename from _posts/2023/2023-11-18-github-actions-deploy-static-files-to-azure-web-app.md rename to _archive_jekyll/_posts/2023/2023-11-18-github-actions-deploy-static-files-to-azure-web-app.md diff --git a/_sass/minimal-mistakes.scss b/_archive_jekyll/_sass/minimal-mistakes.scss similarity index 100% rename from _sass/minimal-mistakes.scss rename to _archive_jekyll/_sass/minimal-mistakes.scss diff --git a/_sass/minimal-mistakes/_animations.scss b/_archive_jekyll/_sass/minimal-mistakes/_animations.scss similarity index 100% rename from _sass/minimal-mistakes/_animations.scss rename to _archive_jekyll/_sass/minimal-mistakes/_animations.scss diff --git a/_sass/minimal-mistakes/_archive.scss b/_archive_jekyll/_sass/minimal-mistakes/_archive.scss similarity index 100% rename from _sass/minimal-mistakes/_archive.scss rename to _archive_jekyll/_sass/minimal-mistakes/_archive.scss diff --git a/_sass/minimal-mistakes/_base.scss b/_archive_jekyll/_sass/minimal-mistakes/_base.scss similarity index 100% 
rename from _sass/minimal-mistakes/_base.scss rename to _archive_jekyll/_sass/minimal-mistakes/_base.scss diff --git a/_sass/minimal-mistakes/_buttons.scss b/_archive_jekyll/_sass/minimal-mistakes/_buttons.scss similarity index 100% rename from _sass/minimal-mistakes/_buttons.scss rename to _archive_jekyll/_sass/minimal-mistakes/_buttons.scss diff --git a/_sass/minimal-mistakes/_footer.scss b/_archive_jekyll/_sass/minimal-mistakes/_footer.scss similarity index 100% rename from _sass/minimal-mistakes/_footer.scss rename to _archive_jekyll/_sass/minimal-mistakes/_footer.scss diff --git a/_sass/minimal-mistakes/_forms.scss b/_archive_jekyll/_sass/minimal-mistakes/_forms.scss similarity index 100% rename from _sass/minimal-mistakes/_forms.scss rename to _archive_jekyll/_sass/minimal-mistakes/_forms.scss diff --git a/_sass/minimal-mistakes/_masthead.scss b/_archive_jekyll/_sass/minimal-mistakes/_masthead.scss similarity index 100% rename from _sass/minimal-mistakes/_masthead.scss rename to _archive_jekyll/_sass/minimal-mistakes/_masthead.scss diff --git a/_sass/minimal-mistakes/_mixins.scss b/_archive_jekyll/_sass/minimal-mistakes/_mixins.scss similarity index 100% rename from _sass/minimal-mistakes/_mixins.scss rename to _archive_jekyll/_sass/minimal-mistakes/_mixins.scss diff --git a/_sass/minimal-mistakes/_navigation.scss b/_archive_jekyll/_sass/minimal-mistakes/_navigation.scss similarity index 100% rename from _sass/minimal-mistakes/_navigation.scss rename to _archive_jekyll/_sass/minimal-mistakes/_navigation.scss diff --git a/_sass/minimal-mistakes/_notices.scss b/_archive_jekyll/_sass/minimal-mistakes/_notices.scss similarity index 100% rename from _sass/minimal-mistakes/_notices.scss rename to _archive_jekyll/_sass/minimal-mistakes/_notices.scss diff --git a/_sass/minimal-mistakes/_page.scss b/_archive_jekyll/_sass/minimal-mistakes/_page.scss similarity index 100% rename from _sass/minimal-mistakes/_page.scss rename to 
_archive_jekyll/_sass/minimal-mistakes/_page.scss diff --git a/_sass/minimal-mistakes/_print.scss b/_archive_jekyll/_sass/minimal-mistakes/_print.scss similarity index 100% rename from _sass/minimal-mistakes/_print.scss rename to _archive_jekyll/_sass/minimal-mistakes/_print.scss diff --git a/_sass/minimal-mistakes/_reset.scss b/_archive_jekyll/_sass/minimal-mistakes/_reset.scss similarity index 100% rename from _sass/minimal-mistakes/_reset.scss rename to _archive_jekyll/_sass/minimal-mistakes/_reset.scss diff --git a/_sass/minimal-mistakes/_search.scss b/_archive_jekyll/_sass/minimal-mistakes/_search.scss similarity index 100% rename from _sass/minimal-mistakes/_search.scss rename to _archive_jekyll/_sass/minimal-mistakes/_search.scss diff --git a/_sass/minimal-mistakes/_sidebar.scss b/_archive_jekyll/_sass/minimal-mistakes/_sidebar.scss similarity index 100% rename from _sass/minimal-mistakes/_sidebar.scss rename to _archive_jekyll/_sass/minimal-mistakes/_sidebar.scss diff --git a/_sass/minimal-mistakes/_syntax.scss b/_archive_jekyll/_sass/minimal-mistakes/_syntax.scss similarity index 100% rename from _sass/minimal-mistakes/_syntax.scss rename to _archive_jekyll/_sass/minimal-mistakes/_syntax.scss diff --git a/_sass/minimal-mistakes/_tables.scss b/_archive_jekyll/_sass/minimal-mistakes/_tables.scss similarity index 100% rename from _sass/minimal-mistakes/_tables.scss rename to _archive_jekyll/_sass/minimal-mistakes/_tables.scss diff --git a/_sass/minimal-mistakes/_utilities.scss b/_archive_jekyll/_sass/minimal-mistakes/_utilities.scss similarity index 100% rename from _sass/minimal-mistakes/_utilities.scss rename to _archive_jekyll/_sass/minimal-mistakes/_utilities.scss diff --git a/_sass/minimal-mistakes/_variables.scss b/_archive_jekyll/_sass/minimal-mistakes/_variables.scss similarity index 100% rename from _sass/minimal-mistakes/_variables.scss rename to _archive_jekyll/_sass/minimal-mistakes/_variables.scss diff --git 
a/_sass/minimal-mistakes/skins/_air.scss b/_archive_jekyll/_sass/minimal-mistakes/skins/_air.scss similarity index 100% rename from _sass/minimal-mistakes/skins/_air.scss rename to _archive_jekyll/_sass/minimal-mistakes/skins/_air.scss diff --git a/_sass/minimal-mistakes/skins/_aqua.scss b/_archive_jekyll/_sass/minimal-mistakes/skins/_aqua.scss similarity index 100% rename from _sass/minimal-mistakes/skins/_aqua.scss rename to _archive_jekyll/_sass/minimal-mistakes/skins/_aqua.scss diff --git a/_sass/minimal-mistakes/skins/_contrast.scss b/_archive_jekyll/_sass/minimal-mistakes/skins/_contrast.scss similarity index 100% rename from _sass/minimal-mistakes/skins/_contrast.scss rename to _archive_jekyll/_sass/minimal-mistakes/skins/_contrast.scss diff --git a/_sass/minimal-mistakes/skins/_dark.scss b/_archive_jekyll/_sass/minimal-mistakes/skins/_dark.scss similarity index 100% rename from _sass/minimal-mistakes/skins/_dark.scss rename to _archive_jekyll/_sass/minimal-mistakes/skins/_dark.scss diff --git a/_sass/minimal-mistakes/skins/_default.scss b/_archive_jekyll/_sass/minimal-mistakes/skins/_default.scss similarity index 100% rename from _sass/minimal-mistakes/skins/_default.scss rename to _archive_jekyll/_sass/minimal-mistakes/skins/_default.scss diff --git a/_sass/minimal-mistakes/skins/_dirt.scss b/_archive_jekyll/_sass/minimal-mistakes/skins/_dirt.scss similarity index 100% rename from _sass/minimal-mistakes/skins/_dirt.scss rename to _archive_jekyll/_sass/minimal-mistakes/skins/_dirt.scss diff --git a/_sass/minimal-mistakes/skins/_mint.scss b/_archive_jekyll/_sass/minimal-mistakes/skins/_mint.scss similarity index 100% rename from _sass/minimal-mistakes/skins/_mint.scss rename to _archive_jekyll/_sass/minimal-mistakes/skins/_mint.scss diff --git a/_sass/minimal-mistakes/skins/_neon.scss b/_archive_jekyll/_sass/minimal-mistakes/skins/_neon.scss similarity index 100% rename from _sass/minimal-mistakes/skins/_neon.scss rename to 
_archive_jekyll/_sass/minimal-mistakes/skins/_neon.scss diff --git a/_sass/minimal-mistakes/skins/_plum.scss b/_archive_jekyll/_sass/minimal-mistakes/skins/_plum.scss similarity index 100% rename from _sass/minimal-mistakes/skins/_plum.scss rename to _archive_jekyll/_sass/minimal-mistakes/skins/_plum.scss diff --git a/_sass/minimal-mistakes/skins/_sunrise.scss b/_archive_jekyll/_sass/minimal-mistakes/skins/_sunrise.scss similarity index 100% rename from _sass/minimal-mistakes/skins/_sunrise.scss rename to _archive_jekyll/_sass/minimal-mistakes/skins/_sunrise.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/_breakpoint.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_breakpoint.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/_breakpoint.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_breakpoint.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/_context.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_context.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/_context.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_context.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/_helpers.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_helpers.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/_helpers.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_helpers.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/_legacy-settings.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_legacy-settings.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/_legacy-settings.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_legacy-settings.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/_no-query.scss 
b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_no-query.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/_no-query.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_no-query.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/_parsers.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_parsers.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/_parsers.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_parsers.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/_respond-to.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_respond-to.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/_respond-to.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_respond-to.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/_settings.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_settings.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/_settings.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/_settings.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/parsers/_double.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/_double.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/parsers/_double.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/_double.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/parsers/_query.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/_query.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/parsers/_query.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/_query.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/parsers/_resolution.scss 
b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/_resolution.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/parsers/_resolution.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/_resolution.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/parsers/_single.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/_single.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/parsers/_single.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/_single.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/parsers/_triple.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/_triple.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/parsers/_triple.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/_triple.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/parsers/double/_default-pair.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/double/_default-pair.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/parsers/double/_default-pair.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/double/_default-pair.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/parsers/double/_default.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/double/_default.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/parsers/double/_default.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/double/_default.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/parsers/double/_double-string.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/double/_double-string.scss similarity index 100% rename from 
_sass/minimal-mistakes/vendor/breakpoint/parsers/double/_double-string.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/double/_double-string.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/parsers/resolution/_resolution.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/resolution/_resolution.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/parsers/resolution/_resolution.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/resolution/_resolution.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/parsers/single/_default.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/single/_default.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/parsers/single/_default.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/single/_default.scss diff --git a/_sass/minimal-mistakes/vendor/breakpoint/parsers/triple/_default.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/triple/_default.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/breakpoint/parsers/triple/_default.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/breakpoint/parsers/triple/_default.scss diff --git a/_sass/minimal-mistakes/vendor/magnific-popup/_magnific-popup.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/magnific-popup/_magnific-popup.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/magnific-popup/_magnific-popup.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/magnific-popup/_magnific-popup.scss diff --git a/_sass/minimal-mistakes/vendor/magnific-popup/_settings.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/magnific-popup/_settings.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/magnific-popup/_settings.scss rename to 
_archive_jekyll/_sass/minimal-mistakes/vendor/magnific-popup/_settings.scss diff --git a/_sass/minimal-mistakes/vendor/susy/_su.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/_su.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/_su.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/_su.scss diff --git a/_sass/minimal-mistakes/vendor/susy/_susy-prefix.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/_susy-prefix.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/_susy-prefix.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/_susy-prefix.scss diff --git a/_sass/minimal-mistakes/vendor/susy/_susy.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/_susy.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/_susy.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/_susy.scss diff --git a/_sass/minimal-mistakes/vendor/susy/plugins/_svg-grid.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/plugins/_svg-grid.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/plugins/_svg-grid.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/plugins/_svg-grid.scss diff --git a/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_prefix.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_prefix.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_prefix.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_prefix.scss diff --git a/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-api.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-api.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-api.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-api.scss diff --git 
a/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-grid-math.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-grid-math.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-grid-math.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-grid-math.scss diff --git a/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-settings.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-settings.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-settings.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-settings.scss diff --git a/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-unprefix.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-unprefix.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-unprefix.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-unprefix.scss diff --git a/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-utilities.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-utilities.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-utilities.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/plugins/svg-grid/_svg-utilities.scss diff --git a/_sass/minimal-mistakes/vendor/susy/susy/_api.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_api.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/susy/_api.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_api.scss diff --git a/_sass/minimal-mistakes/vendor/susy/susy/_normalize.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_normalize.scss similarity index 100% rename from 
_sass/minimal-mistakes/vendor/susy/susy/_normalize.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_normalize.scss diff --git a/_sass/minimal-mistakes/vendor/susy/susy/_parse.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_parse.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/susy/_parse.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_parse.scss diff --git a/_sass/minimal-mistakes/vendor/susy/susy/_settings.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_settings.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/susy/_settings.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_settings.scss diff --git a/_sass/minimal-mistakes/vendor/susy/susy/_su-math.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_su-math.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/susy/_su-math.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_su-math.scss diff --git a/_sass/minimal-mistakes/vendor/susy/susy/_su-validate.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_su-validate.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/susy/_su-validate.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_su-validate.scss diff --git a/_sass/minimal-mistakes/vendor/susy/susy/_syntax-helpers.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_syntax-helpers.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/susy/_syntax-helpers.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_syntax-helpers.scss diff --git a/_sass/minimal-mistakes/vendor/susy/susy/_unprefix.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_unprefix.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/susy/_unprefix.scss rename to 
_archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_unprefix.scss diff --git a/_sass/minimal-mistakes/vendor/susy/susy/_utilities.scss b/_archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_utilities.scss similarity index 100% rename from _sass/minimal-mistakes/vendor/susy/susy/_utilities.scss rename to _archive_jekyll/_sass/minimal-mistakes/vendor/susy/susy/_utilities.scss diff --git a/assets/css/main.scss b/_archive_jekyll/assets/css/main.scss similarity index 100% rename from assets/css/main.scss rename to _archive_jekyll/assets/css/main.scss diff --git a/assets/js/_main.js b/_archive_jekyll/assets/js/_main.js similarity index 100% rename from assets/js/_main.js rename to _archive_jekyll/assets/js/_main.js diff --git a/assets/js/lunr/lunr-en.js b/_archive_jekyll/assets/js/lunr/lunr-en.js similarity index 100% rename from assets/js/lunr/lunr-en.js rename to _archive_jekyll/assets/js/lunr/lunr-en.js diff --git a/assets/js/lunr/lunr-gr.js b/_archive_jekyll/assets/js/lunr/lunr-gr.js similarity index 100% rename from assets/js/lunr/lunr-gr.js rename to _archive_jekyll/assets/js/lunr/lunr-gr.js diff --git a/assets/js/lunr/lunr-store.js b/_archive_jekyll/assets/js/lunr/lunr-store.js similarity index 100% rename from assets/js/lunr/lunr-store.js rename to _archive_jekyll/assets/js/lunr/lunr-store.js diff --git a/assets/js/lunr/lunr.js b/_archive_jekyll/assets/js/lunr/lunr.js similarity index 100% rename from assets/js/lunr/lunr.js rename to _archive_jekyll/assets/js/lunr/lunr.js diff --git a/assets/js/lunr/lunr.min.js b/_archive_jekyll/assets/js/lunr/lunr.min.js similarity index 100% rename from assets/js/lunr/lunr.min.js rename to _archive_jekyll/assets/js/lunr/lunr.min.js diff --git a/assets/js/main.min.js b/_archive_jekyll/assets/js/main.min.js similarity index 100% rename from assets/js/main.min.js rename to _archive_jekyll/assets/js/main.min.js diff --git a/assets/js/plugins/gumshoe.js b/_archive_jekyll/assets/js/plugins/gumshoe.js similarity index 100% 
rename from assets/js/plugins/gumshoe.js rename to _archive_jekyll/assets/js/plugins/gumshoe.js diff --git a/assets/js/plugins/jquery.ba-throttle-debounce.js b/_archive_jekyll/assets/js/plugins/jquery.ba-throttle-debounce.js similarity index 100% rename from assets/js/plugins/jquery.ba-throttle-debounce.js rename to _archive_jekyll/assets/js/plugins/jquery.ba-throttle-debounce.js diff --git a/assets/js/plugins/jquery.fitvids.js b/_archive_jekyll/assets/js/plugins/jquery.fitvids.js similarity index 100% rename from assets/js/plugins/jquery.fitvids.js rename to _archive_jekyll/assets/js/plugins/jquery.fitvids.js diff --git a/assets/js/plugins/jquery.greedy-navigation.js b/_archive_jekyll/assets/js/plugins/jquery.greedy-navigation.js similarity index 100% rename from assets/js/plugins/jquery.greedy-navigation.js rename to _archive_jekyll/assets/js/plugins/jquery.greedy-navigation.js diff --git a/assets/js/plugins/jquery.magnific-popup.js b/_archive_jekyll/assets/js/plugins/jquery.magnific-popup.js similarity index 100% rename from assets/js/plugins/jquery.magnific-popup.js rename to _archive_jekyll/assets/js/plugins/jquery.magnific-popup.js diff --git a/assets/js/plugins/smooth-scroll.js b/_archive_jekyll/assets/js/plugins/smooth-scroll.js similarity index 100% rename from assets/js/plugins/smooth-scroll.js rename to _archive_jekyll/assets/js/plugins/smooth-scroll.js diff --git a/assets/js/qrcode/qrcode.js b/_archive_jekyll/assets/js/qrcode/qrcode.js similarity index 100% rename from assets/js/qrcode/qrcode.js rename to _archive_jekyll/assets/js/qrcode/qrcode.js diff --git a/assets/js/qrcode/qrcode.min.js b/_archive_jekyll/assets/js/qrcode/qrcode.min.js similarity index 100% rename from assets/js/qrcode/qrcode.min.js rename to _archive_jekyll/assets/js/qrcode/qrcode.min.js diff --git a/assets/js/vendor/jquery/jquery-3.6.0.js b/_archive_jekyll/assets/js/vendor/jquery/jquery-3.6.0.js similarity index 100% rename from assets/js/vendor/jquery/jquery-3.6.0.js rename to 
_archive_jekyll/assets/js/vendor/jquery/jquery-3.6.0.js diff --git a/banner.js b/_archive_jekyll/banner.js similarity index 100% rename from banner.js rename to _archive_jekyll/banner.js diff --git a/docs/Gemfile b/_archive_jekyll/docs/Gemfile similarity index 100% rename from docs/Gemfile rename to _archive_jekyll/docs/Gemfile diff --git a/docs/_config.dev.yml b/_archive_jekyll/docs/_config.dev.yml similarity index 100% rename from docs/_config.dev.yml rename to _archive_jekyll/docs/_config.dev.yml diff --git a/docs/_config.yml b/_archive_jekyll/docs/_config.yml similarity index 100% rename from docs/_config.yml rename to _archive_jekyll/docs/_config.yml diff --git a/docs/_data/authors.yml b/_archive_jekyll/docs/_data/authors.yml similarity index 100% rename from docs/_data/authors.yml rename to _archive_jekyll/docs/_data/authors.yml diff --git a/docs/_data/comments/chocolate-chip-cookies/comment-1473870213530.yml b/_archive_jekyll/docs/_data/comments/chocolate-chip-cookies/comment-1473870213530.yml similarity index 100% rename from docs/_data/comments/chocolate-chip-cookies/comment-1473870213530.yml rename to _archive_jekyll/docs/_data/comments/chocolate-chip-cookies/comment-1473870213530.yml diff --git a/docs/_data/comments/chocolate-chip-cookies/comment-1478213467992.yml b/_archive_jekyll/docs/_data/comments/chocolate-chip-cookies/comment-1478213467992.yml similarity index 100% rename from docs/_data/comments/chocolate-chip-cookies/comment-1478213467992.yml rename to _archive_jekyll/docs/_data/comments/chocolate-chip-cookies/comment-1478213467992.yml diff --git a/docs/_data/comments/chocolate-chip-cookies/comment-1500181304581.yml b/_archive_jekyll/docs/_data/comments/chocolate-chip-cookies/comment-1500181304581.yml similarity index 100% rename from docs/_data/comments/chocolate-chip-cookies/comment-1500181304581.yml rename to _archive_jekyll/docs/_data/comments/chocolate-chip-cookies/comment-1500181304581.yml diff --git 
a/docs/_data/comments/chocolate-chip-cookies/comment-1500214855350.yml b/_archive_jekyll/docs/_data/comments/chocolate-chip-cookies/comment-1500214855350.yml similarity index 100% rename from docs/_data/comments/chocolate-chip-cookies/comment-1500214855350.yml rename to _archive_jekyll/docs/_data/comments/chocolate-chip-cookies/comment-1500214855350.yml diff --git a/docs/_data/comments/gemified-theme-beta/comment-1479508047505.yml b/_archive_jekyll/docs/_data/comments/gemified-theme-beta/comment-1479508047505.yml similarity index 100% rename from docs/_data/comments/gemified-theme-beta/comment-1479508047505.yml rename to _archive_jekyll/docs/_data/comments/gemified-theme-beta/comment-1479508047505.yml diff --git a/docs/_data/comments/gemified-theme-beta/comment-1480591890264.yml b/_archive_jekyll/docs/_data/comments/gemified-theme-beta/comment-1480591890264.yml similarity index 100% rename from docs/_data/comments/gemified-theme-beta/comment-1480591890264.yml rename to _archive_jekyll/docs/_data/comments/gemified-theme-beta/comment-1480591890264.yml diff --git a/docs/_data/comments/gemified-theme-beta/comment-1482532165381.yml b/_archive_jekyll/docs/_data/comments/gemified-theme-beta/comment-1482532165381.yml similarity index 100% rename from docs/_data/comments/gemified-theme-beta/comment-1482532165381.yml rename to _archive_jekyll/docs/_data/comments/gemified-theme-beta/comment-1482532165381.yml diff --git a/docs/_data/comments/gemified-theme-beta/comment-1483456786593.yml b/_archive_jekyll/docs/_data/comments/gemified-theme-beta/comment-1483456786593.yml similarity index 100% rename from docs/_data/comments/gemified-theme-beta/comment-1483456786593.yml rename to _archive_jekyll/docs/_data/comments/gemified-theme-beta/comment-1483456786593.yml diff --git a/docs/_data/comments/gemified-theme-beta/comment-1483457152038.yml b/_archive_jekyll/docs/_data/comments/gemified-theme-beta/comment-1483457152038.yml similarity index 100% rename from 
docs/_data/comments/gemified-theme-beta/comment-1483457152038.yml rename to _archive_jekyll/docs/_data/comments/gemified-theme-beta/comment-1483457152038.yml diff --git a/docs/_data/comments/gemified-theme-beta/comment-1519412839827.yml b/_archive_jekyll/docs/_data/comments/gemified-theme-beta/comment-1519412839827.yml similarity index 100% rename from docs/_data/comments/gemified-theme-beta/comment-1519412839827.yml rename to _archive_jekyll/docs/_data/comments/gemified-theme-beta/comment-1519412839827.yml diff --git a/docs/_data/comments/layout-comments/comment-1470944006665.yml b/_archive_jekyll/docs/_data/comments/layout-comments/comment-1470944006665.yml similarity index 100% rename from docs/_data/comments/layout-comments/comment-1470944006665.yml rename to _archive_jekyll/docs/_data/comments/layout-comments/comment-1470944006665.yml diff --git a/docs/_data/comments/layout-comments/comment-1470944162041.yml b/_archive_jekyll/docs/_data/comments/layout-comments/comment-1470944162041.yml similarity index 100% rename from docs/_data/comments/layout-comments/comment-1470944162041.yml rename to _archive_jekyll/docs/_data/comments/layout-comments/comment-1470944162041.yml diff --git a/docs/_data/comments/layout-comments/comment-1472308473018.yml b/_archive_jekyll/docs/_data/comments/layout-comments/comment-1472308473018.yml similarity index 100% rename from docs/_data/comments/layout-comments/comment-1472308473018.yml rename to _archive_jekyll/docs/_data/comments/layout-comments/comment-1472308473018.yml diff --git a/docs/_data/comments/layout-comments/comment-1514406795156.yml b/_archive_jekyll/docs/_data/comments/layout-comments/comment-1514406795156.yml similarity index 100% rename from docs/_data/comments/layout-comments/comment-1514406795156.yml rename to _archive_jekyll/docs/_data/comments/layout-comments/comment-1514406795156.yml diff --git a/docs/_data/comments/layout-comments/comment-1514407115153.yml 
b/_archive_jekyll/docs/_data/comments/layout-comments/comment-1514407115153.yml similarity index 100% rename from docs/_data/comments/layout-comments/comment-1514407115153.yml rename to _archive_jekyll/docs/_data/comments/layout-comments/comment-1514407115153.yml diff --git a/docs/_data/comments/layout-comments/comment-1538482988032.yml b/_archive_jekyll/docs/_data/comments/layout-comments/comment-1538482988032.yml similarity index 100% rename from docs/_data/comments/layout-comments/comment-1538482988032.yml rename to _archive_jekyll/docs/_data/comments/layout-comments/comment-1538482988032.yml diff --git a/docs/_data/comments/layout-header-image-horizontal/comment-1483124729757.yml b/_archive_jekyll/docs/_data/comments/layout-header-image-horizontal/comment-1483124729757.yml similarity index 100% rename from docs/_data/comments/layout-header-image-horizontal/comment-1483124729757.yml rename to _archive_jekyll/docs/_data/comments/layout-header-image-horizontal/comment-1483124729757.yml diff --git a/docs/_data/comments/layout-header-image-horizontal/comment-1483128389943.yml b/_archive_jekyll/docs/_data/comments/layout-header-image-horizontal/comment-1483128389943.yml similarity index 100% rename from docs/_data/comments/layout-header-image-horizontal/comment-1483128389943.yml rename to _archive_jekyll/docs/_data/comments/layout-header-image-horizontal/comment-1483128389943.yml diff --git a/docs/_data/comments/layout-header-image-text-readability/comment-1474306861206.yml b/_archive_jekyll/docs/_data/comments/layout-header-image-text-readability/comment-1474306861206.yml similarity index 100% rename from docs/_data/comments/layout-header-image-text-readability/comment-1474306861206.yml rename to _archive_jekyll/docs/_data/comments/layout-header-image-text-readability/comment-1474306861206.yml diff --git a/docs/_data/comments/layout-header-image-text-readability/comment-1479253931238.yml 
b/_archive_jekyll/docs/_data/comments/layout-header-image-text-readability/comment-1479253931238.yml similarity index 100% rename from docs/_data/comments/layout-header-image-text-readability/comment-1479253931238.yml rename to _archive_jekyll/docs/_data/comments/layout-header-image-text-readability/comment-1479253931238.yml diff --git a/docs/_data/comments/layout-header-image-text-readability/comment-1479265677846.yml b/_archive_jekyll/docs/_data/comments/layout-header-image-text-readability/comment-1479265677846.yml similarity index 100% rename from docs/_data/comments/layout-header-image-text-readability/comment-1479265677846.yml rename to _archive_jekyll/docs/_data/comments/layout-header-image-text-readability/comment-1479265677846.yml diff --git a/docs/_data/comments/layout-header-overlay-image/comment-1512840683260.yml b/_archive_jekyll/docs/_data/comments/layout-header-overlay-image/comment-1512840683260.yml similarity index 100% rename from docs/_data/comments/layout-header-overlay-image/comment-1512840683260.yml rename to _archive_jekyll/docs/_data/comments/layout-header-overlay-image/comment-1512840683260.yml diff --git a/docs/_data/comments/layout-header-overlay-image/comment-1513110608614.yml b/_archive_jekyll/docs/_data/comments/layout-header-overlay-image/comment-1513110608614.yml similarity index 100% rename from docs/_data/comments/layout-header-overlay-image/comment-1513110608614.yml rename to _archive_jekyll/docs/_data/comments/layout-header-overlay-image/comment-1513110608614.yml diff --git a/docs/_data/comments/layout-header-overlay-image/comment-1513111329875.yml b/_archive_jekyll/docs/_data/comments/layout-header-overlay-image/comment-1513111329875.yml similarity index 100% rename from docs/_data/comments/layout-header-overlay-image/comment-1513111329875.yml rename to _archive_jekyll/docs/_data/comments/layout-header-overlay-image/comment-1513111329875.yml diff --git a/docs/_data/comments/layout-header-overlay-image/comment-1513111563922.yml 
b/_archive_jekyll/docs/_data/comments/layout-header-overlay-image/comment-1513111563922.yml similarity index 100% rename from docs/_data/comments/layout-header-overlay-image/comment-1513111563922.yml rename to _archive_jekyll/docs/_data/comments/layout-header-overlay-image/comment-1513111563922.yml diff --git a/docs/_data/comments/layout-related-posts/comment-1500183131535.yml b/_archive_jekyll/docs/_data/comments/layout-related-posts/comment-1500183131535.yml similarity index 100% rename from docs/_data/comments/layout-related-posts/comment-1500183131535.yml rename to _archive_jekyll/docs/_data/comments/layout-related-posts/comment-1500183131535.yml diff --git a/docs/_data/comments/layout-related-posts/comment-1500214974083.yml b/_archive_jekyll/docs/_data/comments/layout-related-posts/comment-1500214974083.yml similarity index 100% rename from docs/_data/comments/layout-related-posts/comment-1500214974083.yml rename to _archive_jekyll/docs/_data/comments/layout-related-posts/comment-1500214974083.yml diff --git a/docs/_data/comments/layout-sidebar-custom/comment-1519247076880.yml b/_archive_jekyll/docs/_data/comments/layout-sidebar-custom/comment-1519247076880.yml similarity index 100% rename from docs/_data/comments/layout-sidebar-custom/comment-1519247076880.yml rename to _archive_jekyll/docs/_data/comments/layout-sidebar-custom/comment-1519247076880.yml diff --git a/docs/_data/comments/layout-sidebar-custom/comment-1519247290410.yml b/_archive_jekyll/docs/_data/comments/layout-sidebar-custom/comment-1519247290410.yml similarity index 100% rename from docs/_data/comments/layout-sidebar-custom/comment-1519247290410.yml rename to _archive_jekyll/docs/_data/comments/layout-sidebar-custom/comment-1519247290410.yml diff --git a/docs/_data/comments/layout-sidebar-custom/comment-1520748170396.yml b/_archive_jekyll/docs/_data/comments/layout-sidebar-custom/comment-1520748170396.yml similarity index 100% rename from 
docs/_data/comments/layout-sidebar-custom/comment-1520748170396.yml rename to _archive_jekyll/docs/_data/comments/layout-sidebar-custom/comment-1520748170396.yml diff --git a/docs/_data/comments/layout-sidebar-nav-list/comment-1492811460488.yml b/_archive_jekyll/docs/_data/comments/layout-sidebar-nav-list/comment-1492811460488.yml similarity index 100% rename from docs/_data/comments/layout-sidebar-nav-list/comment-1492811460488.yml rename to _archive_jekyll/docs/_data/comments/layout-sidebar-nav-list/comment-1492811460488.yml diff --git a/docs/_data/comments/layout-sidebar-nav-list/comment-1492812977693.yml b/_archive_jekyll/docs/_data/comments/layout-sidebar-nav-list/comment-1492812977693.yml similarity index 100% rename from docs/_data/comments/layout-sidebar-nav-list/comment-1492812977693.yml rename to _archive_jekyll/docs/_data/comments/layout-sidebar-nav-list/comment-1492812977693.yml diff --git a/docs/_data/comments/layout-table-of-contents-post/comment-1512118683486.yml b/_archive_jekyll/docs/_data/comments/layout-table-of-contents-post/comment-1512118683486.yml similarity index 100% rename from docs/_data/comments/layout-table-of-contents-post/comment-1512118683486.yml rename to _archive_jekyll/docs/_data/comments/layout-table-of-contents-post/comment-1512118683486.yml diff --git a/docs/_data/comments/layout-table-of-contents-post/comment-1520683848241.yml b/_archive_jekyll/docs/_data/comments/layout-table-of-contents-post/comment-1520683848241.yml similarity index 100% rename from docs/_data/comments/layout-table-of-contents-post/comment-1520683848241.yml rename to _archive_jekyll/docs/_data/comments/layout-table-of-contents-post/comment-1520683848241.yml diff --git a/docs/_data/comments/layout-table-of-contents-post/comment-1527082094887.yml b/_archive_jekyll/docs/_data/comments/layout-table-of-contents-post/comment-1527082094887.yml similarity index 100% rename from docs/_data/comments/layout-table-of-contents-post/comment-1527082094887.yml rename to 
_archive_jekyll/docs/_data/comments/layout-table-of-contents-post/comment-1527082094887.yml diff --git a/docs/_data/comments/layout-table-of-contents-post/comment-1527500055863.yml b/_archive_jekyll/docs/_data/comments/layout-table-of-contents-post/comment-1527500055863.yml similarity index 100% rename from docs/_data/comments/layout-table-of-contents-post/comment-1527500055863.yml rename to _archive_jekyll/docs/_data/comments/layout-table-of-contents-post/comment-1527500055863.yml diff --git a/docs/_data/comments/layout-table-of-contents-post/comment-1527690060032.yml b/_archive_jekyll/docs/_data/comments/layout-table-of-contents-post/comment-1527690060032.yml similarity index 100% rename from docs/_data/comments/layout-table-of-contents-post/comment-1527690060032.yml rename to _archive_jekyll/docs/_data/comments/layout-table-of-contents-post/comment-1527690060032.yml diff --git a/docs/_data/comments/layout-table-of-contents-post/comment-1527690281769.yml b/_archive_jekyll/docs/_data/comments/layout-table-of-contents-post/comment-1527690281769.yml similarity index 100% rename from docs/_data/comments/layout-table-of-contents-post/comment-1527690281769.yml rename to _archive_jekyll/docs/_data/comments/layout-table-of-contents-post/comment-1527690281769.yml diff --git a/docs/_data/comments/layout-table-of-contents-post/comment-1540422628114.yml b/_archive_jekyll/docs/_data/comments/layout-table-of-contents-post/comment-1540422628114.yml similarity index 100% rename from docs/_data/comments/layout-table-of-contents-post/comment-1540422628114.yml rename to _archive_jekyll/docs/_data/comments/layout-table-of-contents-post/comment-1540422628114.yml diff --git a/docs/_data/comments/markup-image-alignment/comment-1534823211504.yml b/_archive_jekyll/docs/_data/comments/markup-image-alignment/comment-1534823211504.yml similarity index 100% rename from docs/_data/comments/markup-image-alignment/comment-1534823211504.yml rename to 
_archive_jekyll/docs/_data/comments/markup-image-alignment/comment-1534823211504.yml diff --git a/docs/_data/comments/markup-more-images/comment-1472040323579.yml b/_archive_jekyll/docs/_data/comments/markup-more-images/comment-1472040323579.yml similarity index 100% rename from docs/_data/comments/markup-more-images/comment-1472040323579.yml rename to _archive_jekyll/docs/_data/comments/markup-more-images/comment-1472040323579.yml diff --git a/docs/_data/comments/markup-more-images/comment-1472146638519.yml b/_archive_jekyll/docs/_data/comments/markup-more-images/comment-1472146638519.yml similarity index 100% rename from docs/_data/comments/markup-more-images/comment-1472146638519.yml rename to _archive_jekyll/docs/_data/comments/markup-more-images/comment-1472146638519.yml diff --git a/docs/_data/comments/markup-syntax-highlighting/comment-1470969665387.yml b/_archive_jekyll/docs/_data/comments/markup-syntax-highlighting/comment-1470969665387.yml similarity index 100% rename from docs/_data/comments/markup-syntax-highlighting/comment-1470969665387.yml rename to _archive_jekyll/docs/_data/comments/markup-syntax-highlighting/comment-1470969665387.yml diff --git a/docs/_data/comments/markup-syntax-highlighting/comment-1478928407894.yml b/_archive_jekyll/docs/_data/comments/markup-syntax-highlighting/comment-1478928407894.yml similarity index 100% rename from docs/_data/comments/markup-syntax-highlighting/comment-1478928407894.yml rename to _archive_jekyll/docs/_data/comments/markup-syntax-highlighting/comment-1478928407894.yml diff --git a/docs/_data/comments/markup-syntax-highlighting/comment-1487758246637.yml b/_archive_jekyll/docs/_data/comments/markup-syntax-highlighting/comment-1487758246637.yml similarity index 100% rename from docs/_data/comments/markup-syntax-highlighting/comment-1487758246637.yml rename to _archive_jekyll/docs/_data/comments/markup-syntax-highlighting/comment-1487758246637.yml diff --git 
a/docs/_data/comments/markup-syntax-highlighting/comment-1505403032256.yml b/_archive_jekyll/docs/_data/comments/markup-syntax-highlighting/comment-1505403032256.yml similarity index 100% rename from docs/_data/comments/markup-syntax-highlighting/comment-1505403032256.yml rename to _archive_jekyll/docs/_data/comments/markup-syntax-highlighting/comment-1505403032256.yml diff --git a/docs/_data/comments/markup-syntax-highlighting/comment-1505403241808.yml b/_archive_jekyll/docs/_data/comments/markup-syntax-highlighting/comment-1505403241808.yml similarity index 100% rename from docs/_data/comments/markup-syntax-highlighting/comment-1505403241808.yml rename to _archive_jekyll/docs/_data/comments/markup-syntax-highlighting/comment-1505403241808.yml diff --git a/docs/_data/comments/markup-syntax-highlighting/comment-1514836962551.yml b/_archive_jekyll/docs/_data/comments/markup-syntax-highlighting/comment-1514836962551.yml similarity index 100% rename from docs/_data/comments/markup-syntax-highlighting/comment-1514836962551.yml rename to _archive_jekyll/docs/_data/comments/markup-syntax-highlighting/comment-1514836962551.yml diff --git a/docs/_data/comments/post-future-date/comment-1472064560364.yml b/_archive_jekyll/docs/_data/comments/post-future-date/comment-1472064560364.yml similarity index 100% rename from docs/_data/comments/post-future-date/comment-1472064560364.yml rename to _archive_jekyll/docs/_data/comments/post-future-date/comment-1472064560364.yml diff --git a/docs/_data/comments/post-future-date/comment-1472786137736.yml b/_archive_jekyll/docs/_data/comments/post-future-date/comment-1472786137736.yml similarity index 100% rename from docs/_data/comments/post-future-date/comment-1472786137736.yml rename to _archive_jekyll/docs/_data/comments/post-future-date/comment-1472786137736.yml diff --git a/docs/_data/comments/post-gallery/comment-1500055247314.yml b/_archive_jekyll/docs/_data/comments/post-gallery/comment-1500055247314.yml similarity index 100% 
rename from docs/_data/comments/post-gallery/comment-1500055247314.yml rename to _archive_jekyll/docs/_data/comments/post-gallery/comment-1500055247314.yml diff --git a/docs/_data/comments/post-gallery/comment-1500056210776.yml b/_archive_jekyll/docs/_data/comments/post-gallery/comment-1500056210776.yml similarity index 100% rename from docs/_data/comments/post-gallery/comment-1500056210776.yml rename to _archive_jekyll/docs/_data/comments/post-gallery/comment-1500056210776.yml diff --git a/docs/_data/comments/post-modified/comment-1497284119888.yml b/_archive_jekyll/docs/_data/comments/post-modified/comment-1497284119888.yml similarity index 100% rename from docs/_data/comments/post-modified/comment-1497284119888.yml rename to _archive_jekyll/docs/_data/comments/post-modified/comment-1497284119888.yml diff --git a/docs/_data/comments/post-modified/comment-1497284892766.yml b/_archive_jekyll/docs/_data/comments/post-modified/comment-1497284892766.yml similarity index 100% rename from docs/_data/comments/post-modified/comment-1497284892766.yml rename to _archive_jekyll/docs/_data/comments/post-modified/comment-1497284892766.yml diff --git a/docs/_data/comments/post-modified/comment-1520673777110.yml b/_archive_jekyll/docs/_data/comments/post-modified/comment-1520673777110.yml similarity index 100% rename from docs/_data/comments/post-modified/comment-1520673777110.yml rename to _archive_jekyll/docs/_data/comments/post-modified/comment-1520673777110.yml diff --git a/docs/_data/comments/post-video-youtube/comment-1506623182288.yml b/_archive_jekyll/docs/_data/comments/post-video-youtube/comment-1506623182288.yml similarity index 100% rename from docs/_data/comments/post-video-youtube/comment-1506623182288.yml rename to _archive_jekyll/docs/_data/comments/post-video-youtube/comment-1506623182288.yml diff --git a/docs/_data/comments/post-video-youtube/comment-1506623710918.yml b/_archive_jekyll/docs/_data/comments/post-video-youtube/comment-1506623710918.yml similarity 
index 100% rename from docs/_data/comments/post-video-youtube/comment-1506623710918.yml rename to _archive_jekyll/docs/_data/comments/post-video-youtube/comment-1506623710918.yml diff --git a/docs/_data/comments/post-video-youtube/comment-1506632190623.yml b/_archive_jekyll/docs/_data/comments/post-video-youtube/comment-1506632190623.yml similarity index 100% rename from docs/_data/comments/post-video-youtube/comment-1506632190623.yml rename to _archive_jekyll/docs/_data/comments/post-video-youtube/comment-1506632190623.yml diff --git a/docs/_data/comments/welcome-to-jekyll/comment-1470942205700.yml b/_archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1470942205700.yml similarity index 100% rename from docs/_data/comments/welcome-to-jekyll/comment-1470942205700.yml rename to _archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1470942205700.yml diff --git a/docs/_data/comments/welcome-to-jekyll/comment-1470942247755.yml b/_archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1470942247755.yml similarity index 100% rename from docs/_data/comments/welcome-to-jekyll/comment-1470942247755.yml rename to _archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1470942247755.yml diff --git a/docs/_data/comments/welcome-to-jekyll/comment-1470942265819.yml b/_archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1470942265819.yml similarity index 100% rename from docs/_data/comments/welcome-to-jekyll/comment-1470942265819.yml rename to _archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1470942265819.yml diff --git a/docs/_data/comments/welcome-to-jekyll/comment-1470942493518.yml b/_archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1470942493518.yml similarity index 100% rename from docs/_data/comments/welcome-to-jekyll/comment-1470942493518.yml rename to _archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1470942493518.yml diff --git a/docs/_data/comments/welcome-to-jekyll/comment-1471823346931.yml 
b/_archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1471823346931.yml similarity index 100% rename from docs/_data/comments/welcome-to-jekyll/comment-1471823346931.yml rename to _archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1471823346931.yml diff --git a/docs/_data/comments/welcome-to-jekyll/comment-1471834988411.yml b/_archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1471834988411.yml similarity index 100% rename from docs/_data/comments/welcome-to-jekyll/comment-1471834988411.yml rename to _archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1471834988411.yml diff --git a/docs/_data/comments/welcome-to-jekyll/comment-1472786599470.yml b/_archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1472786599470.yml similarity index 100% rename from docs/_data/comments/welcome-to-jekyll/comment-1472786599470.yml rename to _archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1472786599470.yml diff --git a/docs/_data/comments/welcome-to-jekyll/comment-1474328950155.yml b/_archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1474328950155.yml similarity index 100% rename from docs/_data/comments/welcome-to-jekyll/comment-1474328950155.yml rename to _archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1474328950155.yml diff --git a/docs/_data/comments/welcome-to-jekyll/comment-1500505983331.yml b/_archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1500505983331.yml similarity index 100% rename from docs/_data/comments/welcome-to-jekyll/comment-1500505983331.yml rename to _archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1500505983331.yml diff --git a/docs/_data/comments/welcome-to-jekyll/comment-1507141538771.yml b/_archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1507141538771.yml similarity index 100% rename from docs/_data/comments/welcome-to-jekyll/comment-1507141538771.yml rename to 
_archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1507141538771.yml diff --git a/docs/_data/comments/welcome-to-jekyll/comment-1529792272424.yml b/_archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1529792272424.yml similarity index 100% rename from docs/_data/comments/welcome-to-jekyll/comment-1529792272424.yml rename to _archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1529792272424.yml diff --git a/docs/_data/comments/welcome-to-jekyll/comment-1529794012288.yml b/_archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1529794012288.yml similarity index 100% rename from docs/_data/comments/welcome-to-jekyll/comment-1529794012288.yml rename to _archive_jekyll/docs/_data/comments/welcome-to-jekyll/comment-1529794012288.yml diff --git a/docs/_data/navigation.yml b/_archive_jekyll/docs/_data/navigation.yml similarity index 100% rename from docs/_data/navigation.yml rename to _archive_jekyll/docs/_data/navigation.yml diff --git a/docs/_docs/01-quick-start-guide.md b/_archive_jekyll/docs/_docs/01-quick-start-guide.md similarity index 100% rename from docs/_docs/01-quick-start-guide.md rename to _archive_jekyll/docs/_docs/01-quick-start-guide.md diff --git a/docs/_docs/02-structure.md b/_archive_jekyll/docs/_docs/02-structure.md similarity index 100% rename from docs/_docs/02-structure.md rename to _archive_jekyll/docs/_docs/02-structure.md diff --git a/docs/_docs/03-installation.md b/_archive_jekyll/docs/_docs/03-installation.md similarity index 100% rename from docs/_docs/03-installation.md rename to _archive_jekyll/docs/_docs/03-installation.md diff --git a/docs/_docs/04-upgrading.md b/_archive_jekyll/docs/_docs/04-upgrading.md similarity index 100% rename from docs/_docs/04-upgrading.md rename to _archive_jekyll/docs/_docs/04-upgrading.md diff --git a/docs/_docs/05-configuration.md b/_archive_jekyll/docs/_docs/05-configuration.md similarity index 100% rename from docs/_docs/05-configuration.md rename to 
_archive_jekyll/docs/_docs/05-configuration.md diff --git a/docs/_docs/06-overriding-theme-defaults.md b/_archive_jekyll/docs/_docs/06-overriding-theme-defaults.md similarity index 100% rename from docs/_docs/06-overriding-theme-defaults.md rename to _archive_jekyll/docs/_docs/06-overriding-theme-defaults.md diff --git a/docs/_docs/07-navigation.md b/_archive_jekyll/docs/_docs/07-navigation.md similarity index 100% rename from docs/_docs/07-navigation.md rename to _archive_jekyll/docs/_docs/07-navigation.md diff --git a/docs/_docs/08-ui-text.md b/_archive_jekyll/docs/_docs/08-ui-text.md similarity index 100% rename from docs/_docs/08-ui-text.md rename to _archive_jekyll/docs/_docs/08-ui-text.md diff --git a/docs/_docs/09-authors.md b/_archive_jekyll/docs/_docs/09-authors.md similarity index 100% rename from docs/_docs/09-authors.md rename to _archive_jekyll/docs/_docs/09-authors.md diff --git a/docs/_docs/10-layouts.md b/_archive_jekyll/docs/_docs/10-layouts.md similarity index 100% rename from docs/_docs/10-layouts.md rename to _archive_jekyll/docs/_docs/10-layouts.md diff --git a/docs/_docs/11-posts.md b/_archive_jekyll/docs/_docs/11-posts.md similarity index 100% rename from docs/_docs/11-posts.md rename to _archive_jekyll/docs/_docs/11-posts.md diff --git a/docs/_docs/12-pages.md b/_archive_jekyll/docs/_docs/12-pages.md similarity index 100% rename from docs/_docs/12-pages.md rename to _archive_jekyll/docs/_docs/12-pages.md diff --git a/docs/_docs/13-collections.md b/_archive_jekyll/docs/_docs/13-collections.md similarity index 100% rename from docs/_docs/13-collections.md rename to _archive_jekyll/docs/_docs/13-collections.md diff --git a/docs/_docs/14-helpers.md b/_archive_jekyll/docs/_docs/14-helpers.md similarity index 100% rename from docs/_docs/14-helpers.md rename to _archive_jekyll/docs/_docs/14-helpers.md diff --git a/docs/_docs/15-utility-classes.md b/_archive_jekyll/docs/_docs/15-utility-classes.md similarity index 100% rename from 
docs/_docs/15-utility-classes.md rename to _archive_jekyll/docs/_docs/15-utility-classes.md diff --git a/docs/_docs/16-stylesheets.md b/_archive_jekyll/docs/_docs/16-stylesheets.md similarity index 100% rename from docs/_docs/16-stylesheets.md rename to _archive_jekyll/docs/_docs/16-stylesheets.md diff --git a/docs/_docs/17-javascript.md b/_archive_jekyll/docs/_docs/17-javascript.md similarity index 100% rename from docs/_docs/17-javascript.md rename to _archive_jekyll/docs/_docs/17-javascript.md diff --git a/docs/_docs/18-history.md b/_archive_jekyll/docs/_docs/18-history.md similarity index 100% rename from docs/_docs/18-history.md rename to _archive_jekyll/docs/_docs/18-history.md diff --git a/docs/_docs/19-contributing.md b/_archive_jekyll/docs/_docs/19-contributing.md similarity index 100% rename from docs/_docs/19-contributing.md rename to _archive_jekyll/docs/_docs/19-contributing.md diff --git a/docs/_docs/20-docs-2-2.md b/_archive_jekyll/docs/_docs/20-docs-2-2.md similarity index 100% rename from docs/_docs/20-docs-2-2.md rename to _archive_jekyll/docs/_docs/20-docs-2-2.md diff --git a/docs/_docs/21-license.md b/_archive_jekyll/docs/_docs/21-license.md similarity index 100% rename from docs/_docs/21-license.md rename to _archive_jekyll/docs/_docs/21-license.md diff --git a/docs/_drafts/post-draft.md b/_archive_jekyll/docs/_drafts/post-draft.md similarity index 100% rename from docs/_drafts/post-draft.md rename to _archive_jekyll/docs/_drafts/post-draft.md diff --git a/docs/_layouts/default.html b/_archive_jekyll/docs/_layouts/default.html similarity index 100% rename from docs/_layouts/default.html rename to _archive_jekyll/docs/_layouts/default.html diff --git a/docs/_layouts/single.html b/_archive_jekyll/docs/_layouts/single.html similarity index 100% rename from docs/_layouts/single.html rename to _archive_jekyll/docs/_layouts/single.html diff --git a/docs/_pages/404.md b/_archive_jekyll/docs/_pages/404.md similarity index 100% rename from 
docs/_pages/404.md rename to _archive_jekyll/docs/_pages/404.md diff --git a/docs/_pages/about.md b/_archive_jekyll/docs/_pages/about.md similarity index 100% rename from docs/_pages/about.md rename to _archive_jekyll/docs/_pages/about.md diff --git a/docs/_pages/archive-layout-with-content.md b/_archive_jekyll/docs/_pages/archive-layout-with-content.md similarity index 100% rename from docs/_pages/archive-layout-with-content.md rename to _archive_jekyll/docs/_pages/archive-layout-with-content.md diff --git a/docs/_pages/category-archive.md b/_archive_jekyll/docs/_pages/category-archive.md similarity index 100% rename from docs/_pages/category-archive.md rename to _archive_jekyll/docs/_pages/category-archive.md diff --git a/docs/_pages/collection-archive.html b/_archive_jekyll/docs/_pages/collection-archive.html similarity index 100% rename from docs/_pages/collection-archive.html rename to _archive_jekyll/docs/_pages/collection-archive.html diff --git a/docs/_pages/edge-case.md b/_archive_jekyll/docs/_pages/edge-case.md similarity index 100% rename from docs/_pages/edge-case.md rename to _archive_jekyll/docs/_pages/edge-case.md diff --git a/docs/_pages/home.md b/_archive_jekyll/docs/_pages/home.md similarity index 100% rename from docs/_pages/home.md rename to _archive_jekyll/docs/_pages/home.md diff --git a/docs/_pages/lorem-ipsum.md b/_archive_jekyll/docs/_pages/lorem-ipsum.md similarity index 100% rename from docs/_pages/lorem-ipsum.md rename to _archive_jekyll/docs/_pages/lorem-ipsum.md diff --git a/docs/_pages/markup.md b/_archive_jekyll/docs/_pages/markup.md similarity index 100% rename from docs/_pages/markup.md rename to _archive_jekyll/docs/_pages/markup.md diff --git a/docs/_pages/page-a.md b/_archive_jekyll/docs/_pages/page-a.md similarity index 100% rename from docs/_pages/page-a.md rename to _archive_jekyll/docs/_pages/page-a.md diff --git a/docs/_pages/page-archive.html b/_archive_jekyll/docs/_pages/page-archive.html similarity index 100% rename from 
docs/_pages/page-archive.html rename to _archive_jekyll/docs/_pages/page-archive.html diff --git a/docs/_pages/page-b.md b/_archive_jekyll/docs/_pages/page-b.md similarity index 100% rename from docs/_pages/page-b.md rename to _archive_jekyll/docs/_pages/page-b.md diff --git a/docs/_pages/pets.md b/_archive_jekyll/docs/_pages/pets.md similarity index 100% rename from docs/_pages/pets.md rename to _archive_jekyll/docs/_pages/pets.md diff --git a/docs/_pages/portfolio-archive.md b/_archive_jekyll/docs/_pages/portfolio-archive.md similarity index 100% rename from docs/_pages/portfolio-archive.md rename to _archive_jekyll/docs/_pages/portfolio-archive.md diff --git a/docs/_pages/post-archive-feature-rows.html b/_archive_jekyll/docs/_pages/post-archive-feature-rows.html similarity index 100% rename from docs/_pages/post-archive-feature-rows.html rename to _archive_jekyll/docs/_pages/post-archive-feature-rows.html diff --git a/docs/_pages/recipes-archive.md b/_archive_jekyll/docs/_pages/recipes-archive.md similarity index 100% rename from docs/_pages/recipes-archive.md rename to _archive_jekyll/docs/_pages/recipes-archive.md diff --git a/docs/_pages/sample-page.md b/_archive_jekyll/docs/_pages/sample-page.md similarity index 100% rename from docs/_pages/sample-page.md rename to _archive_jekyll/docs/_pages/sample-page.md diff --git a/docs/_pages/sitemap.md b/_archive_jekyll/docs/_pages/sitemap.md similarity index 100% rename from docs/_pages/sitemap.md rename to _archive_jekyll/docs/_pages/sitemap.md diff --git a/docs/_pages/splash-page.md b/_archive_jekyll/docs/_pages/splash-page.md similarity index 100% rename from docs/_pages/splash-page.md rename to _archive_jekyll/docs/_pages/splash-page.md diff --git a/docs/_pages/tag-archive.md b/_archive_jekyll/docs/_pages/tag-archive.md similarity index 100% rename from docs/_pages/tag-archive.md rename to _archive_jekyll/docs/_pages/tag-archive.md diff --git a/docs/_pages/terms.md b/_archive_jekyll/docs/_pages/terms.md 
similarity index 100% rename from docs/_pages/terms.md rename to _archive_jekyll/docs/_pages/terms.md diff --git a/docs/_pages/year-archive.md b/_archive_jekyll/docs/_pages/year-archive.md similarity index 100% rename from docs/_pages/year-archive.md rename to _archive_jekyll/docs/_pages/year-archive.md diff --git a/docs/_pets/lhasa-apso.md b/_archive_jekyll/docs/_pets/lhasa-apso.md similarity index 100% rename from docs/_pets/lhasa-apso.md rename to _archive_jekyll/docs/_pets/lhasa-apso.md diff --git a/docs/_pets/tabby.md b/_archive_jekyll/docs/_pets/tabby.md similarity index 100% rename from docs/_pets/tabby.md rename to _archive_jekyll/docs/_pets/tabby.md diff --git a/docs/_portfolio/baz-boom-identity.md b/_archive_jekyll/docs/_portfolio/baz-boom-identity.md similarity index 100% rename from docs/_portfolio/baz-boom-identity.md rename to _archive_jekyll/docs/_portfolio/baz-boom-identity.md diff --git a/docs/_portfolio/fizz-bang-identity.md b/_archive_jekyll/docs/_portfolio/fizz-bang-identity.md similarity index 100% rename from docs/_portfolio/fizz-bang-identity.md rename to _archive_jekyll/docs/_portfolio/fizz-bang-identity.md diff --git a/docs/_portfolio/foo-bar-website.md b/_archive_jekyll/docs/_portfolio/foo-bar-website.md similarity index 100% rename from docs/_portfolio/foo-bar-website.md rename to _archive_jekyll/docs/_portfolio/foo-bar-website.md diff --git a/docs/_portfolio/ginger-gulp-identity.md b/_archive_jekyll/docs/_portfolio/ginger-gulp-identity.md similarity index 100% rename from docs/_portfolio/ginger-gulp-identity.md rename to _archive_jekyll/docs/_portfolio/ginger-gulp-identity.md diff --git a/docs/_posts/2009-05-15-edge-case-nested-and-mixed-lists.md b/_archive_jekyll/docs/_posts/2009-05-15-edge-case-nested-and-mixed-lists.md similarity index 100% rename from docs/_posts/2009-05-15-edge-case-nested-and-mixed-lists.md rename to _archive_jekyll/docs/_posts/2009-05-15-edge-case-nested-and-mixed-lists.md diff --git 
a/docs/_posts/2009-06-01-edge-case-many-tags.md b/_archive_jekyll/docs/_posts/2009-06-01-edge-case-many-tags.md similarity index 100% rename from docs/_posts/2009-06-01-edge-case-many-tags.md rename to _archive_jekyll/docs/_posts/2009-06-01-edge-case-many-tags.md diff --git a/docs/_posts/2009-07-02-edge-case-many-categories.md b/_archive_jekyll/docs/_posts/2009-07-02-edge-case-many-categories.md similarity index 100% rename from docs/_posts/2009-07-02-edge-case-many-categories.md rename to _archive_jekyll/docs/_posts/2009-07-02-edge-case-many-categories.md diff --git a/docs/_posts/2009-08-06-edge-case-no-body-content.md b/_archive_jekyll/docs/_posts/2009-08-06-edge-case-no-body-content.md similarity index 100% rename from docs/_posts/2009-08-06-edge-case-no-body-content.md rename to _archive_jekyll/docs/_posts/2009-08-06-edge-case-no-body-content.md diff --git a/docs/_posts/2009-09-05-edge-case-no-yaml-title.md b/_archive_jekyll/docs/_posts/2009-09-05-edge-case-no-yaml-title.md similarity index 100% rename from docs/_posts/2009-09-05-edge-case-no-yaml-title.md rename to _archive_jekyll/docs/_posts/2009-09-05-edge-case-no-yaml-title.md diff --git a/docs/_posts/2009-10-05-edge-case-multiline-excerpt.md b/_archive_jekyll/docs/_posts/2009-10-05-edge-case-multiline-excerpt.md similarity index 100% rename from docs/_posts/2009-10-05-edge-case-multiline-excerpt.md rename to _archive_jekyll/docs/_posts/2009-10-05-edge-case-multiline-excerpt.md diff --git a/docs/_posts/2009-10-05-edge-case-title-should-not-overflow-the-content-area.md b/_archive_jekyll/docs/_posts/2009-10-05-edge-case-title-should-not-overflow-the-content-area.md similarity index 100% rename from docs/_posts/2009-10-05-edge-case-title-should-not-overflow-the-content-area.md rename to _archive_jekyll/docs/_posts/2009-10-05-edge-case-title-should-not-overflow-the-content-area.md diff --git a/docs/_posts/2009-10-05-edge-case-very-long-title.md 
b/_archive_jekyll/docs/_posts/2009-10-05-edge-case-very-long-title.md similarity index 100% rename from docs/_posts/2009-10-05-edge-case-very-long-title.md rename to _archive_jekyll/docs/_posts/2009-10-05-edge-case-very-long-title.md diff --git a/docs/_posts/2010-01-07-post-modified.md b/_archive_jekyll/docs/_posts/2010-01-07-post-modified.md similarity index 100% rename from docs/_posts/2010-01-07-post-modified.md rename to _archive_jekyll/docs/_posts/2010-01-07-post-modified.md diff --git a/docs/_posts/2010-01-07-post-standard.md b/_archive_jekyll/docs/_posts/2010-01-07-post-standard.md similarity index 100% rename from docs/_posts/2010-01-07-post-standard.md rename to _archive_jekyll/docs/_posts/2010-01-07-post-standard.md diff --git a/docs/_posts/2010-01-08-post-chat.md b/_archive_jekyll/docs/_posts/2010-01-08-post-chat.md similarity index 100% rename from docs/_posts/2010-01-08-post-chat.md rename to _archive_jekyll/docs/_posts/2010-01-08-post-chat.md diff --git a/docs/_posts/2010-02-05-post-notice.md b/_archive_jekyll/docs/_posts/2010-02-05-post-notice.md similarity index 100% rename from docs/_posts/2010-02-05-post-notice.md rename to _archive_jekyll/docs/_posts/2010-02-05-post-notice.md diff --git a/docs/_posts/2010-02-05-post-quote.md b/_archive_jekyll/docs/_posts/2010-02-05-post-quote.md similarity index 100% rename from docs/_posts/2010-02-05-post-quote.md rename to _archive_jekyll/docs/_posts/2010-02-05-post-quote.md diff --git a/docs/_posts/2010-03-07-post-link.md b/_archive_jekyll/docs/_posts/2010-03-07-post-link.md similarity index 100% rename from docs/_posts/2010-03-07-post-link.md rename to _archive_jekyll/docs/_posts/2010-03-07-post-link.md diff --git a/docs/_posts/2010-06-02-post-video-youtube.md b/_archive_jekyll/docs/_posts/2010-06-02-post-video-youtube.md similarity index 100% rename from docs/_posts/2010-06-02-post-video-youtube.md rename to _archive_jekyll/docs/_posts/2010-06-02-post-video-youtube.md diff --git 
a/docs/_posts/2010-08-05-post-header-image-og-override.md b/_archive_jekyll/docs/_posts/2010-08-05-post-header-image-og-override.md similarity index 100% rename from docs/_posts/2010-08-05-post-header-image-og-override.md rename to _archive_jekyll/docs/_posts/2010-08-05-post-header-image-og-override.md diff --git a/docs/_posts/2010-08-05-post-header-overlay-image-og-override.md b/_archive_jekyll/docs/_posts/2010-08-05-post-header-overlay-image-og-override.md similarity index 100% rename from docs/_posts/2010-08-05-post-header-overlay-image-og-override.md rename to _archive_jekyll/docs/_posts/2010-08-05-post-header-overlay-image-og-override.md diff --git a/docs/_posts/2010-08-05-post-image-linked.md b/_archive_jekyll/docs/_posts/2010-08-05-post-image-linked.md similarity index 100% rename from docs/_posts/2010-08-05-post-image-linked.md rename to _archive_jekyll/docs/_posts/2010-08-05-post-image-linked.md diff --git a/docs/_posts/2010-08-05-post-image-standard.md b/_archive_jekyll/docs/_posts/2010-08-05-post-image-standard.md similarity index 100% rename from docs/_posts/2010-08-05-post-image-standard.md rename to _archive_jekyll/docs/_posts/2010-08-05-post-image-standard.md diff --git a/docs/_posts/2010-08-05-post-teaser-image-og-override.md b/_archive_jekyll/docs/_posts/2010-08-05-post-teaser-image-og-override.md similarity index 100% rename from docs/_posts/2010-08-05-post-teaser-image-og-override.md rename to _archive_jekyll/docs/_posts/2010-08-05-post-teaser-image-og-override.md diff --git a/docs/_posts/2010-08-06-post-image-linked-caption.md b/_archive_jekyll/docs/_posts/2010-08-06-post-image-linked-caption.md similarity index 100% rename from docs/_posts/2010-08-06-post-image-linked-caption.md rename to _archive_jekyll/docs/_posts/2010-08-06-post-image-linked-caption.md diff --git a/docs/_posts/2010-08-07-post-image-caption.md b/_archive_jekyll/docs/_posts/2010-08-07-post-image-caption.md similarity index 100% rename from 
docs/_posts/2010-08-07-post-image-caption.md rename to _archive_jekyll/docs/_posts/2010-08-07-post-image-caption.md diff --git a/docs/_posts/2010-09-09-post-gallery.md b/_archive_jekyll/docs/_posts/2010-09-09-post-gallery.md similarity index 100% rename from docs/_posts/2010-09-09-post-gallery.md rename to _archive_jekyll/docs/_posts/2010-09-09-post-gallery.md diff --git a/docs/_posts/2010-09-10-post-twitter-embeds.md b/_archive_jekyll/docs/_posts/2010-09-10-post-twitter-embeds.md similarity index 100% rename from docs/_posts/2010-09-10-post-twitter-embeds.md rename to _archive_jekyll/docs/_posts/2010-09-10-post-twitter-embeds.md diff --git a/docs/_posts/2010-10-25-post-future-date.md b/_archive_jekyll/docs/_posts/2010-10-25-post-future-date.md similarity index 100% rename from docs/_posts/2010-10-25-post-future-date.md rename to _archive_jekyll/docs/_posts/2010-10-25-post-future-date.md diff --git a/docs/_posts/2012-01-02-layout-comments-disabled.md b/_archive_jekyll/docs/_posts/2012-01-02-layout-comments-disabled.md similarity index 100% rename from docs/_posts/2012-01-02-layout-comments-disabled.md rename to _archive_jekyll/docs/_posts/2012-01-02-layout-comments-disabled.md diff --git a/docs/_posts/2012-01-02-layout-comments.md b/_archive_jekyll/docs/_posts/2012-01-02-layout-comments.md similarity index 100% rename from docs/_posts/2012-01-02-layout-comments.md rename to _archive_jekyll/docs/_posts/2012-01-02-layout-comments.md diff --git a/docs/_posts/2012-01-02-layout-post-date-disabled.md b/_archive_jekyll/docs/_posts/2012-01-02-layout-post-date-disabled.md similarity index 100% rename from docs/_posts/2012-01-02-layout-post-date-disabled.md rename to _archive_jekyll/docs/_posts/2012-01-02-layout-post-date-disabled.md diff --git a/docs/_posts/2012-01-02-layout-post-date.md b/_archive_jekyll/docs/_posts/2012-01-02-layout-post-date.md similarity index 100% rename from docs/_posts/2012-01-02-layout-post-date.md rename to 
_archive_jekyll/docs/_posts/2012-01-02-layout-post-date.md diff --git a/docs/_posts/2012-01-02-layout-read-time-disabled.md b/_archive_jekyll/docs/_posts/2012-01-02-layout-read-time-disabled.md similarity index 100% rename from docs/_posts/2012-01-02-layout-read-time-disabled.md rename to _archive_jekyll/docs/_posts/2012-01-02-layout-read-time-disabled.md diff --git a/docs/_posts/2012-01-02-layout-read-time.md b/_archive_jekyll/docs/_posts/2012-01-02-layout-read-time.md similarity index 100% rename from docs/_posts/2012-01-02-layout-read-time.md rename to _archive_jekyll/docs/_posts/2012-01-02-layout-read-time.md diff --git a/docs/_posts/2012-01-02-layout-related-posts-disabled.md b/_archive_jekyll/docs/_posts/2012-01-02-layout-related-posts-disabled.md similarity index 100% rename from docs/_posts/2012-01-02-layout-related-posts-disabled.md rename to _archive_jekyll/docs/_posts/2012-01-02-layout-related-posts-disabled.md diff --git a/docs/_posts/2012-01-02-layout-related-posts.md b/_archive_jekyll/docs/_posts/2012-01-02-layout-related-posts.md similarity index 100% rename from docs/_posts/2012-01-02-layout-related-posts.md rename to _archive_jekyll/docs/_posts/2012-01-02-layout-related-posts.md diff --git a/docs/_posts/2012-01-02-layout-sharing-disabled.md b/_archive_jekyll/docs/_posts/2012-01-02-layout-sharing-disabled.md similarity index 100% rename from docs/_posts/2012-01-02-layout-sharing-disabled.md rename to _archive_jekyll/docs/_posts/2012-01-02-layout-sharing-disabled.md diff --git a/docs/_posts/2012-01-02-layout-sharing.md b/_archive_jekyll/docs/_posts/2012-01-02-layout-sharing.md similarity index 100% rename from docs/_posts/2012-01-02-layout-sharing.md rename to _archive_jekyll/docs/_posts/2012-01-02-layout-sharing.md diff --git a/docs/_posts/2012-01-03-layout-read-time-comments-sharing-related-posts-disabled.md b/_archive_jekyll/docs/_posts/2012-01-03-layout-read-time-comments-sharing-related-posts-disabled.md similarity index 100% rename from 
docs/_posts/2012-01-03-layout-read-time-comments-sharing-related-posts-disabled.md rename to _archive_jekyll/docs/_posts/2012-01-03-layout-read-time-comments-sharing-related-posts-disabled.md diff --git a/docs/_posts/2012-01-03-layout-table-of-contents-include-post.md b/_archive_jekyll/docs/_posts/2012-01-03-layout-table-of-contents-include-post.md similarity index 100% rename from docs/_posts/2012-01-03-layout-table-of-contents-include-post.md rename to _archive_jekyll/docs/_posts/2012-01-03-layout-table-of-contents-include-post.md diff --git a/docs/_posts/2012-01-03-layout-table-of-contents-indent-post.md b/_archive_jekyll/docs/_posts/2012-01-03-layout-table-of-contents-indent-post.md similarity index 100% rename from docs/_posts/2012-01-03-layout-table-of-contents-indent-post.md rename to _archive_jekyll/docs/_posts/2012-01-03-layout-table-of-contents-indent-post.md diff --git a/docs/_posts/2012-01-03-layout-table-of-contents-post.md b/_archive_jekyll/docs/_posts/2012-01-03-layout-table-of-contents-post.md similarity index 100% rename from docs/_posts/2012-01-03-layout-table-of-contents-post.md rename to _archive_jekyll/docs/_posts/2012-01-03-layout-table-of-contents-post.md diff --git a/docs/_posts/2012-01-03-layout-table-of-contents-sticky.md b/_archive_jekyll/docs/_posts/2012-01-03-layout-table-of-contents-sticky.md similarity index 100% rename from docs/_posts/2012-01-03-layout-table-of-contents-sticky.md rename to _archive_jekyll/docs/_posts/2012-01-03-layout-table-of-contents-sticky.md diff --git a/docs/_posts/2012-03-14-layout-code-excerpt-generated.md b/_archive_jekyll/docs/_posts/2012-03-14-layout-code-excerpt-generated.md similarity index 100% rename from docs/_posts/2012-03-14-layout-code-excerpt-generated.md rename to _archive_jekyll/docs/_posts/2012-03-14-layout-code-excerpt-generated.md diff --git a/docs/_posts/2012-03-14-layout-excerpt-defined.md b/_archive_jekyll/docs/_posts/2012-03-14-layout-excerpt-defined.md similarity index 100% rename from 
docs/_posts/2012-03-14-layout-excerpt-defined.md rename to _archive_jekyll/docs/_posts/2012-03-14-layout-excerpt-defined.md diff --git a/docs/_posts/2012-03-14-layout-excerpt-generated.md b/_archive_jekyll/docs/_posts/2012-03-14-layout-excerpt-generated.md similarity index 100% rename from docs/_posts/2012-03-14-layout-excerpt-generated.md rename to _archive_jekyll/docs/_posts/2012-03-14-layout-excerpt-generated.md diff --git a/docs/_posts/2012-03-15-layout-author-override.md b/_archive_jekyll/docs/_posts/2012-03-15-layout-author-override.md similarity index 100% rename from docs/_posts/2012-03-15-layout-author-override.md rename to _archive_jekyll/docs/_posts/2012-03-15-layout-author-override.md diff --git a/docs/_posts/2012-03-15-layout-author-sidebar-disabled.md b/_archive_jekyll/docs/_posts/2012-03-15-layout-author-sidebar-disabled.md similarity index 100% rename from docs/_posts/2012-03-15-layout-author-sidebar-disabled.md rename to _archive_jekyll/docs/_posts/2012-03-15-layout-author-sidebar-disabled.md diff --git a/docs/_posts/2012-03-15-layout-header-image-external.md b/_archive_jekyll/docs/_posts/2012-03-15-layout-header-image-external.md similarity index 100% rename from docs/_posts/2012-03-15-layout-header-image-external.md rename to _archive_jekyll/docs/_posts/2012-03-15-layout-header-image-external.md diff --git a/docs/_posts/2012-03-15-layout-header-image-horizontal.md b/_archive_jekyll/docs/_posts/2012-03-15-layout-header-image-horizontal.md similarity index 100% rename from docs/_posts/2012-03-15-layout-header-image-horizontal.md rename to _archive_jekyll/docs/_posts/2012-03-15-layout-header-image-horizontal.md diff --git a/docs/_posts/2012-03-15-layout-header-image-text-readability.md b/_archive_jekyll/docs/_posts/2012-03-15-layout-header-image-text-readability.md similarity index 100% rename from docs/_posts/2012-03-15-layout-header-image-text-readability.md rename to _archive_jekyll/docs/_posts/2012-03-15-layout-header-image-text-readability.md 
diff --git a/docs/_posts/2012-03-15-layout-header-image-vertical.md b/_archive_jekyll/docs/_posts/2012-03-15-layout-header-image-vertical.md similarity index 100% rename from docs/_posts/2012-03-15-layout-header-image-vertical.md rename to _archive_jekyll/docs/_posts/2012-03-15-layout-header-image-vertical.md diff --git a/docs/_posts/2012-03-15-layout-header-overlay-color.md b/_archive_jekyll/docs/_posts/2012-03-15-layout-header-overlay-color.md similarity index 100% rename from docs/_posts/2012-03-15-layout-header-overlay-color.md rename to _archive_jekyll/docs/_posts/2012-03-15-layout-header-overlay-color.md diff --git a/docs/_posts/2012-03-15-layout-header-overlay-image-tagline.md b/_archive_jekyll/docs/_posts/2012-03-15-layout-header-overlay-image-tagline.md similarity index 100% rename from docs/_posts/2012-03-15-layout-header-overlay-image-tagline.md rename to _archive_jekyll/docs/_posts/2012-03-15-layout-header-overlay-image-tagline.md diff --git a/docs/_posts/2012-03-15-layout-header-overlay-image.md b/_archive_jekyll/docs/_posts/2012-03-15-layout-header-overlay-image.md similarity index 100% rename from docs/_posts/2012-03-15-layout-header-overlay-image.md rename to _archive_jekyll/docs/_posts/2012-03-15-layout-header-overlay-image.md diff --git a/docs/_posts/2012-03-15-layout-more-tag.md b/_archive_jekyll/docs/_posts/2012-03-15-layout-more-tag.md similarity index 100% rename from docs/_posts/2012-03-15-layout-more-tag.md rename to _archive_jekyll/docs/_posts/2012-03-15-layout-more-tag.md diff --git a/docs/_posts/2012-03-15-layout-sidebar-custom.md b/_archive_jekyll/docs/_posts/2012-03-15-layout-sidebar-custom.md similarity index 100% rename from docs/_posts/2012-03-15-layout-sidebar-custom.md rename to _archive_jekyll/docs/_posts/2012-03-15-layout-sidebar-custom.md diff --git a/docs/_posts/2012-03-15-layout-sidebar-nav-list.md b/_archive_jekyll/docs/_posts/2012-03-15-layout-sidebar-nav-list.md similarity index 100% rename from 
docs/_posts/2012-03-15-layout-sidebar-nav-list.md rename to _archive_jekyll/docs/_posts/2012-03-15-layout-sidebar-nav-list.md diff --git a/docs/_posts/2012-05-22-markup-text-readability-wide-page.md b/_archive_jekyll/docs/_posts/2012-05-22-markup-text-readability-wide-page.md similarity index 100% rename from docs/_posts/2012-05-22-markup-text-readability-wide-page.md rename to _archive_jekyll/docs/_posts/2012-05-22-markup-text-readability-wide-page.md diff --git a/docs/_posts/2012-05-22-markup-text-readability.md b/_archive_jekyll/docs/_posts/2012-05-22-markup-text-readability.md similarity index 100% rename from docs/_posts/2012-05-22-markup-text-readability.md rename to _archive_jekyll/docs/_posts/2012-05-22-markup-text-readability.md diff --git a/docs/_posts/2013-01-05-markup-title-with-markup.md b/_archive_jekyll/docs/_posts/2013-01-05-markup-title-with-markup.md similarity index 100% rename from docs/_posts/2013-01-05-markup-title-with-markup.md rename to _archive_jekyll/docs/_posts/2013-01-05-markup-title-with-markup.md diff --git a/docs/_posts/2013-01-05-markup-title-with-special-characters.md b/_archive_jekyll/docs/_posts/2013-01-05-markup-title-with-special-characters.md similarity index 100% rename from docs/_posts/2013-01-05-markup-title-with-special-characters.md rename to _archive_jekyll/docs/_posts/2013-01-05-markup-title-with-special-characters.md diff --git a/docs/_posts/2013-01-09-markup-text-alignment.md b/_archive_jekyll/docs/_posts/2013-01-09-markup-text-alignment.md similarity index 100% rename from docs/_posts/2013-01-09-markup-text-alignment.md rename to _archive_jekyll/docs/_posts/2013-01-09-markup-text-alignment.md diff --git a/docs/_posts/2013-01-10-markup-image-alignment.md b/_archive_jekyll/docs/_posts/2013-01-10-markup-image-alignment.md similarity index 100% rename from docs/_posts/2013-01-10-markup-image-alignment.md rename to _archive_jekyll/docs/_posts/2013-01-10-markup-image-alignment.md diff --git 
a/docs/_posts/2013-01-11-markup-html-tags-and-formatting.md b/_archive_jekyll/docs/_posts/2013-01-11-markup-html-tags-and-formatting.md similarity index 100% rename from docs/_posts/2013-01-11-markup-html-tags-and-formatting.md rename to _archive_jekyll/docs/_posts/2013-01-11-markup-html-tags-and-formatting.md diff --git a/docs/_posts/2013-05-22-markup-more-images.md b/_archive_jekyll/docs/_posts/2013-05-22-markup-more-images.md similarity index 100% rename from docs/_posts/2013-05-22-markup-more-images.md rename to _archive_jekyll/docs/_posts/2013-05-22-markup-more-images.md diff --git a/docs/_posts/2013-08-16-markup-syntax-highlighting.md b/_archive_jekyll/docs/_posts/2013-08-16-markup-syntax-highlighting.md similarity index 100% rename from docs/_posts/2013-08-16-markup-syntax-highlighting.md rename to _archive_jekyll/docs/_posts/2013-08-16-markup-syntax-highlighting.md diff --git a/docs/_posts/2016-02-24-welcome-to-jekyll.md b/_archive_jekyll/docs/_posts/2016-02-24-welcome-to-jekyll.md similarity index 100% rename from docs/_posts/2016-02-24-welcome-to-jekyll.md rename to _archive_jekyll/docs/_posts/2016-02-24-welcome-to-jekyll.md diff --git a/docs/_posts/2016-09-21-gemified-theme-alpha.md b/_archive_jekyll/docs/_posts/2016-09-21-gemified-theme-alpha.md similarity index 100% rename from docs/_posts/2016-09-21-gemified-theme-alpha.md rename to _archive_jekyll/docs/_posts/2016-09-21-gemified-theme-alpha.md diff --git a/docs/_posts/2016-10-06-gemified-theme-beta.md b/_archive_jekyll/docs/_posts/2016-10-06-gemified-theme-beta.md similarity index 100% rename from docs/_posts/2016-10-06-gemified-theme-beta.md rename to _archive_jekyll/docs/_posts/2016-10-06-gemified-theme-beta.md diff --git a/docs/_posts/2017-01-23-layout-header-video.md b/_archive_jekyll/docs/_posts/2017-01-23-layout-header-video.md similarity index 100% rename from docs/_posts/2017-01-23-layout-header-video.md rename to _archive_jekyll/docs/_posts/2017-01-23-layout-header-video.md diff --git 
a/docs/_posts/2017-11-28-post-exclude-search.md b/_archive_jekyll/docs/_posts/2017-11-28-post-exclude-search.md similarity index 100% rename from docs/_posts/2017-11-28-post-exclude-search.md rename to _archive_jekyll/docs/_posts/2017-11-28-post-exclude-search.md diff --git a/docs/_recipes/chocolate-chip-cookies.md b/_archive_jekyll/docs/_recipes/chocolate-chip-cookies.md similarity index 100% rename from docs/_recipes/chocolate-chip-cookies.md rename to _archive_jekyll/docs/_recipes/chocolate-chip-cookies.md diff --git a/docs/_recipes/oatmeal-cookies.md b/_archive_jekyll/docs/_recipes/oatmeal-cookies.md similarity index 100% rename from docs/_recipes/oatmeal-cookies.md rename to _archive_jekyll/docs/_recipes/oatmeal-cookies.md diff --git a/docs/_recipes/peanut-butter-cookies.md b/_archive_jekyll/docs/_recipes/peanut-butter-cookies.md similarity index 100% rename from docs/_recipes/peanut-butter-cookies.md rename to _archive_jekyll/docs/_recipes/peanut-butter-cookies.md diff --git a/docs/assets/images/350x250.png b/_archive_jekyll/docs/assets/images/350x250.png similarity index 100% rename from docs/assets/images/350x250.png rename to _archive_jekyll/docs/assets/images/350x250.png diff --git a/docs/assets/images/3953273590_704e3899d5_m.jpg b/_archive_jekyll/docs/assets/images/3953273590_704e3899d5_m.jpg similarity index 100% rename from docs/assets/images/3953273590_704e3899d5_m.jpg rename to _archive_jekyll/docs/assets/images/3953273590_704e3899d5_m.jpg diff --git a/docs/assets/images/500x300.png b/_archive_jekyll/docs/assets/images/500x300.png similarity index 100% rename from docs/assets/images/500x300.png rename to _archive_jekyll/docs/assets/images/500x300.png diff --git a/docs/assets/images/air-skin-archive-large.png b/_archive_jekyll/docs/assets/images/air-skin-archive-large.png similarity index 100% rename from docs/assets/images/air-skin-archive-large.png rename to _archive_jekyll/docs/assets/images/air-skin-archive-large.png diff --git 
a/docs/assets/images/air-skin-archive.png b/_archive_jekyll/docs/assets/images/air-skin-archive.png similarity index 100% rename from docs/assets/images/air-skin-archive.png rename to _archive_jekyll/docs/assets/images/air-skin-archive.png diff --git a/docs/assets/images/air-skin-post-large.png b/_archive_jekyll/docs/assets/images/air-skin-post-large.png similarity index 100% rename from docs/assets/images/air-skin-post-large.png rename to _archive_jekyll/docs/assets/images/air-skin-post-large.png diff --git a/docs/assets/images/air-skin-post.png b/_archive_jekyll/docs/assets/images/air-skin-post.png similarity index 100% rename from docs/assets/images/air-skin-post.png rename to _archive_jekyll/docs/assets/images/air-skin-post.png diff --git a/docs/assets/images/android-chrome-144x144.png b/_archive_jekyll/docs/assets/images/android-chrome-144x144.png similarity index 100% rename from docs/assets/images/android-chrome-144x144.png rename to _archive_jekyll/docs/assets/images/android-chrome-144x144.png diff --git a/docs/assets/images/android-chrome-192x192.png b/_archive_jekyll/docs/assets/images/android-chrome-192x192.png similarity index 100% rename from docs/assets/images/android-chrome-192x192.png rename to _archive_jekyll/docs/assets/images/android-chrome-192x192.png diff --git a/docs/assets/images/android-chrome-36x36.png b/_archive_jekyll/docs/assets/images/android-chrome-36x36.png similarity index 100% rename from docs/assets/images/android-chrome-36x36.png rename to _archive_jekyll/docs/assets/images/android-chrome-36x36.png diff --git a/docs/assets/images/android-chrome-48x48.png b/_archive_jekyll/docs/assets/images/android-chrome-48x48.png similarity index 100% rename from docs/assets/images/android-chrome-48x48.png rename to _archive_jekyll/docs/assets/images/android-chrome-48x48.png diff --git a/docs/assets/images/android-chrome-72x72.png b/_archive_jekyll/docs/assets/images/android-chrome-72x72.png similarity index 100% rename from 
docs/assets/images/android-chrome-72x72.png rename to _archive_jekyll/docs/assets/images/android-chrome-72x72.png diff --git a/docs/assets/images/android-chrome-96x96.png b/_archive_jekyll/docs/assets/images/android-chrome-96x96.png similarity index 100% rename from docs/assets/images/android-chrome-96x96.png rename to _archive_jekyll/docs/assets/images/android-chrome-96x96.png diff --git a/docs/assets/images/apple-touch-icon-114x114.png b/_archive_jekyll/docs/assets/images/apple-touch-icon-114x114.png similarity index 100% rename from docs/assets/images/apple-touch-icon-114x114.png rename to _archive_jekyll/docs/assets/images/apple-touch-icon-114x114.png diff --git a/docs/assets/images/apple-touch-icon-120x120.png b/_archive_jekyll/docs/assets/images/apple-touch-icon-120x120.png similarity index 100% rename from docs/assets/images/apple-touch-icon-120x120.png rename to _archive_jekyll/docs/assets/images/apple-touch-icon-120x120.png diff --git a/docs/assets/images/apple-touch-icon-144x144.png b/_archive_jekyll/docs/assets/images/apple-touch-icon-144x144.png similarity index 100% rename from docs/assets/images/apple-touch-icon-144x144.png rename to _archive_jekyll/docs/assets/images/apple-touch-icon-144x144.png diff --git a/docs/assets/images/apple-touch-icon-152x152.png b/_archive_jekyll/docs/assets/images/apple-touch-icon-152x152.png similarity index 100% rename from docs/assets/images/apple-touch-icon-152x152.png rename to _archive_jekyll/docs/assets/images/apple-touch-icon-152x152.png diff --git a/docs/assets/images/apple-touch-icon-180x180.png b/_archive_jekyll/docs/assets/images/apple-touch-icon-180x180.png similarity index 100% rename from docs/assets/images/apple-touch-icon-180x180.png rename to _archive_jekyll/docs/assets/images/apple-touch-icon-180x180.png diff --git a/docs/assets/images/apple-touch-icon-57x57.png b/_archive_jekyll/docs/assets/images/apple-touch-icon-57x57.png similarity index 100% rename from docs/assets/images/apple-touch-icon-57x57.png 
rename to _archive_jekyll/docs/assets/images/apple-touch-icon-57x57.png diff --git a/docs/assets/images/apple-touch-icon-60x60.png b/_archive_jekyll/docs/assets/images/apple-touch-icon-60x60.png similarity index 100% rename from docs/assets/images/apple-touch-icon-60x60.png rename to _archive_jekyll/docs/assets/images/apple-touch-icon-60x60.png diff --git a/docs/assets/images/apple-touch-icon-72x72.png b/_archive_jekyll/docs/assets/images/apple-touch-icon-72x72.png similarity index 100% rename from docs/assets/images/apple-touch-icon-72x72.png rename to _archive_jekyll/docs/assets/images/apple-touch-icon-72x72.png diff --git a/docs/assets/images/apple-touch-icon-76x76.png b/_archive_jekyll/docs/assets/images/apple-touch-icon-76x76.png similarity index 100% rename from docs/assets/images/apple-touch-icon-76x76.png rename to _archive_jekyll/docs/assets/images/apple-touch-icon-76x76.png diff --git a/docs/assets/images/apple-touch-icon-precomposed.png b/_archive_jekyll/docs/assets/images/apple-touch-icon-precomposed.png similarity index 100% rename from docs/assets/images/apple-touch-icon-precomposed.png rename to _archive_jekyll/docs/assets/images/apple-touch-icon-precomposed.png diff --git a/docs/assets/images/apple-touch-icon.png b/_archive_jekyll/docs/assets/images/apple-touch-icon.png similarity index 100% rename from docs/assets/images/apple-touch-icon.png rename to _archive_jekyll/docs/assets/images/apple-touch-icon.png diff --git a/docs/assets/images/aqua-skin-archive-large.png b/_archive_jekyll/docs/assets/images/aqua-skin-archive-large.png similarity index 100% rename from docs/assets/images/aqua-skin-archive-large.png rename to _archive_jekyll/docs/assets/images/aqua-skin-archive-large.png diff --git a/docs/assets/images/aqua-skin-archive.png b/_archive_jekyll/docs/assets/images/aqua-skin-archive.png similarity index 100% rename from docs/assets/images/aqua-skin-archive.png rename to _archive_jekyll/docs/assets/images/aqua-skin-archive.png diff --git 
a/docs/assets/images/aqua-skin-post-large.png b/_archive_jekyll/docs/assets/images/aqua-skin-post-large.png similarity index 100% rename from docs/assets/images/aqua-skin-post-large.png rename to _archive_jekyll/docs/assets/images/aqua-skin-post-large.png diff --git a/docs/assets/images/aqua-skin-post.png b/_archive_jekyll/docs/assets/images/aqua-skin-post.png similarity index 100% rename from docs/assets/images/aqua-skin-post.png rename to _archive_jekyll/docs/assets/images/aqua-skin-post.png diff --git a/docs/assets/images/bio-photo-2.jpg b/_archive_jekyll/docs/assets/images/bio-photo-2.jpg similarity index 100% rename from docs/assets/images/bio-photo-2.jpg rename to _archive_jekyll/docs/assets/images/bio-photo-2.jpg diff --git a/docs/assets/images/bio-photo.jpg b/_archive_jekyll/docs/assets/images/bio-photo.jpg similarity index 100% rename from docs/assets/images/bio-photo.jpg rename to _archive_jekyll/docs/assets/images/bio-photo.jpg diff --git a/docs/assets/images/browserconfig.xml b/_archive_jekyll/docs/assets/images/browserconfig.xml similarity index 100% rename from docs/assets/images/browserconfig.xml rename to _archive_jekyll/docs/assets/images/browserconfig.xml diff --git a/docs/assets/images/contrast-code-block.jpg b/_archive_jekyll/docs/assets/images/contrast-code-block.jpg similarity index 100% rename from docs/assets/images/contrast-code-block.jpg rename to _archive_jekyll/docs/assets/images/contrast-code-block.jpg diff --git a/docs/assets/images/contrast-skin-archive-large.png b/_archive_jekyll/docs/assets/images/contrast-skin-archive-large.png similarity index 100% rename from docs/assets/images/contrast-skin-archive-large.png rename to _archive_jekyll/docs/assets/images/contrast-skin-archive-large.png diff --git a/docs/assets/images/contrast-skin-archive.png b/_archive_jekyll/docs/assets/images/contrast-skin-archive.png similarity index 100% rename from docs/assets/images/contrast-skin-archive.png rename to 
_archive_jekyll/docs/assets/images/contrast-skin-archive.png diff --git a/docs/assets/images/contrast-skin-post-large.png b/_archive_jekyll/docs/assets/images/contrast-skin-post-large.png similarity index 100% rename from docs/assets/images/contrast-skin-post-large.png rename to _archive_jekyll/docs/assets/images/contrast-skin-post-large.png diff --git a/docs/assets/images/contrast-skin-post.png b/_archive_jekyll/docs/assets/images/contrast-skin-post.png similarity index 100% rename from docs/assets/images/contrast-skin-post.png rename to _archive_jekyll/docs/assets/images/contrast-skin-post.png diff --git a/docs/assets/images/dark-code-block.jpg b/_archive_jekyll/docs/assets/images/dark-code-block.jpg similarity index 100% rename from docs/assets/images/dark-code-block.jpg rename to _archive_jekyll/docs/assets/images/dark-code-block.jpg diff --git a/docs/assets/images/dark-skin-archive-large.png b/_archive_jekyll/docs/assets/images/dark-skin-archive-large.png similarity index 100% rename from docs/assets/images/dark-skin-archive-large.png rename to _archive_jekyll/docs/assets/images/dark-skin-archive-large.png diff --git a/docs/assets/images/dark-skin-archive.png b/_archive_jekyll/docs/assets/images/dark-skin-archive.png similarity index 100% rename from docs/assets/images/dark-skin-archive.png rename to _archive_jekyll/docs/assets/images/dark-skin-archive.png diff --git a/docs/assets/images/dark-skin-post-large.png b/_archive_jekyll/docs/assets/images/dark-skin-post-large.png similarity index 100% rename from docs/assets/images/dark-skin-post-large.png rename to _archive_jekyll/docs/assets/images/dark-skin-post-large.png diff --git a/docs/assets/images/dark-skin-post.png b/_archive_jekyll/docs/assets/images/dark-skin-post.png similarity index 100% rename from docs/assets/images/dark-skin-post.png rename to _archive_jekyll/docs/assets/images/dark-skin-post.png diff --git a/docs/assets/images/default-code-block.jpg 
b/_archive_jekyll/docs/assets/images/default-code-block.jpg similarity index 100% rename from docs/assets/images/default-code-block.jpg rename to _archive_jekyll/docs/assets/images/default-code-block.jpg diff --git a/docs/assets/images/dirt-code-block.jpg b/_archive_jekyll/docs/assets/images/dirt-code-block.jpg similarity index 100% rename from docs/assets/images/dirt-code-block.jpg rename to _archive_jekyll/docs/assets/images/dirt-code-block.jpg diff --git a/docs/assets/images/dirt-skin-archive-large.png b/_archive_jekyll/docs/assets/images/dirt-skin-archive-large.png similarity index 100% rename from docs/assets/images/dirt-skin-archive-large.png rename to _archive_jekyll/docs/assets/images/dirt-skin-archive-large.png diff --git a/docs/assets/images/dirt-skin-archive.png b/_archive_jekyll/docs/assets/images/dirt-skin-archive.png similarity index 100% rename from docs/assets/images/dirt-skin-archive.png rename to _archive_jekyll/docs/assets/images/dirt-skin-archive.png diff --git a/docs/assets/images/dirt-skin-post-large.png b/_archive_jekyll/docs/assets/images/dirt-skin-post-large.png similarity index 100% rename from docs/assets/images/dirt-skin-post-large.png rename to _archive_jekyll/docs/assets/images/dirt-skin-post-large.png diff --git a/docs/assets/images/dirt-skin-post.png b/_archive_jekyll/docs/assets/images/dirt-skin-post.png similarity index 100% rename from docs/assets/images/dirt-skin-post.png rename to _archive_jekyll/docs/assets/images/dirt-skin-post.png diff --git a/docs/assets/images/dracula-code-block.jpg b/_archive_jekyll/docs/assets/images/dracula-code-block.jpg similarity index 100% rename from docs/assets/images/dracula-code-block.jpg rename to _archive_jekyll/docs/assets/images/dracula-code-block.jpg diff --git a/docs/assets/images/facebook-share-example.jpg b/_archive_jekyll/docs/assets/images/facebook-share-example.jpg similarity index 100% rename from docs/assets/images/facebook-share-example.jpg rename to 
_archive_jekyll/docs/assets/images/facebook-share-example.jpg diff --git a/docs/assets/images/favicon-16x16.png b/_archive_jekyll/docs/assets/images/favicon-16x16.png similarity index 100% rename from docs/assets/images/favicon-16x16.png rename to _archive_jekyll/docs/assets/images/favicon-16x16.png diff --git a/docs/assets/images/favicon-32x32.png b/_archive_jekyll/docs/assets/images/favicon-32x32.png similarity index 100% rename from docs/assets/images/favicon-32x32.png rename to _archive_jekyll/docs/assets/images/favicon-32x32.png diff --git a/docs/assets/images/favicon-96x96.png b/_archive_jekyll/docs/assets/images/favicon-96x96.png similarity index 100% rename from docs/assets/images/favicon-96x96.png rename to _archive_jekyll/docs/assets/images/favicon-96x96.png diff --git a/docs/assets/images/favicon.ico b/_archive_jekyll/docs/assets/images/favicon.ico similarity index 100% rename from docs/assets/images/favicon.ico rename to _archive_jekyll/docs/assets/images/favicon.ico diff --git a/docs/assets/images/foo-bar-identity-th.jpg b/_archive_jekyll/docs/assets/images/foo-bar-identity-th.jpg similarity index 100% rename from docs/assets/images/foo-bar-identity-th.jpg rename to _archive_jekyll/docs/assets/images/foo-bar-identity-th.jpg diff --git a/docs/assets/images/foo-bar-identity.jpg b/_archive_jekyll/docs/assets/images/foo-bar-identity.jpg similarity index 100% rename from docs/assets/images/foo-bar-identity.jpg rename to _archive_jekyll/docs/assets/images/foo-bar-identity.jpg diff --git a/docs/assets/images/google-custom-search-engine-layout.png b/_archive_jekyll/docs/assets/images/google-custom-search-engine-layout.png similarity index 100% rename from docs/assets/images/google-custom-search-engine-layout.png rename to _archive_jekyll/docs/assets/images/google-custom-search-engine-layout.png diff --git a/docs/assets/images/image-alignment-1200x4002.jpg b/_archive_jekyll/docs/assets/images/image-alignment-1200x4002.jpg similarity index 100% rename from 
docs/assets/images/image-alignment-1200x4002.jpg rename to _archive_jekyll/docs/assets/images/image-alignment-1200x4002.jpg diff --git a/docs/assets/images/image-alignment-150x150.jpg b/_archive_jekyll/docs/assets/images/image-alignment-150x150.jpg similarity index 100% rename from docs/assets/images/image-alignment-150x150.jpg rename to _archive_jekyll/docs/assets/images/image-alignment-150x150.jpg diff --git a/docs/assets/images/image-alignment-300x200.jpg b/_archive_jekyll/docs/assets/images/image-alignment-300x200.jpg similarity index 100% rename from docs/assets/images/image-alignment-300x200.jpg rename to _archive_jekyll/docs/assets/images/image-alignment-300x200.jpg diff --git a/docs/assets/images/image-alignment-580x300.jpg b/_archive_jekyll/docs/assets/images/image-alignment-580x300.jpg similarity index 100% rename from docs/assets/images/image-alignment-580x300.jpg rename to _archive_jekyll/docs/assets/images/image-alignment-580x300.jpg diff --git a/docs/assets/images/manifest.json b/_archive_jekyll/docs/assets/images/manifest.json similarity index 100% rename from docs/assets/images/manifest.json rename to _archive_jekyll/docs/assets/images/manifest.json diff --git a/docs/assets/images/markup-syntax-highlighting-teaser.jpg b/_archive_jekyll/docs/assets/images/markup-syntax-highlighting-teaser.jpg similarity index 100% rename from docs/assets/images/markup-syntax-highlighting-teaser.jpg rename to _archive_jekyll/docs/assets/images/markup-syntax-highlighting-teaser.jpg diff --git a/docs/assets/images/masthead-search.gif b/_archive_jekyll/docs/assets/images/masthead-search.gif similarity index 100% rename from docs/assets/images/masthead-search.gif rename to _archive_jekyll/docs/assets/images/masthead-search.gif diff --git a/docs/assets/images/michael-rose.jpg b/_archive_jekyll/docs/assets/images/michael-rose.jpg similarity index 100% rename from docs/assets/images/michael-rose.jpg rename to _archive_jekyll/docs/assets/images/michael-rose.jpg diff --git 
a/docs/assets/images/mint-skin-archive-large.png b/_archive_jekyll/docs/assets/images/mint-skin-archive-large.png similarity index 100% rename from docs/assets/images/mint-skin-archive-large.png rename to _archive_jekyll/docs/assets/images/mint-skin-archive-large.png diff --git a/docs/assets/images/mint-skin-archive.png b/_archive_jekyll/docs/assets/images/mint-skin-archive.png similarity index 100% rename from docs/assets/images/mint-skin-archive.png rename to _archive_jekyll/docs/assets/images/mint-skin-archive.png diff --git a/docs/assets/images/mint-skin-post-large.png b/_archive_jekyll/docs/assets/images/mint-skin-post-large.png similarity index 100% rename from docs/assets/images/mint-skin-post-large.png rename to _archive_jekyll/docs/assets/images/mint-skin-post-large.png diff --git a/docs/assets/images/mint-skin-post.png b/_archive_jekyll/docs/assets/images/mint-skin-post.png similarity index 100% rename from docs/assets/images/mint-skin-post.png rename to _archive_jekyll/docs/assets/images/mint-skin-post.png diff --git a/docs/assets/images/mm-archive-grid-view-example.jpg b/_archive_jekyll/docs/assets/images/mm-archive-grid-view-example.jpg similarity index 100% rename from docs/assets/images/mm-archive-grid-view-example.jpg rename to _archive_jekyll/docs/assets/images/mm-archive-grid-view-example.jpg diff --git a/docs/assets/images/mm-author-profile-reddit-color.png b/_archive_jekyll/docs/assets/images/mm-author-profile-reddit-color.png similarity index 100% rename from docs/assets/images/mm-author-profile-reddit-color.png rename to _archive_jekyll/docs/assets/images/mm-author-profile-reddit-color.png diff --git a/docs/assets/images/mm-author-profile-reddit-gs.png b/_archive_jekyll/docs/assets/images/mm-author-profile-reddit-gs.png similarity index 100% rename from docs/assets/images/mm-author-profile-reddit-gs.png rename to _archive_jekyll/docs/assets/images/mm-author-profile-reddit-gs.png diff --git a/docs/assets/images/mm-author-sidebar-example.jpg 
b/_archive_jekyll/docs/assets/images/mm-author-sidebar-example.jpg similarity index 100% rename from docs/assets/images/mm-author-sidebar-example.jpg rename to _archive_jekyll/docs/assets/images/mm-author-sidebar-example.jpg diff --git a/docs/assets/images/mm-breadcrumbs-example.jpg b/_archive_jekyll/docs/assets/images/mm-breadcrumbs-example.jpg similarity index 100% rename from docs/assets/images/mm-breadcrumbs-example.jpg rename to _archive_jekyll/docs/assets/images/mm-breadcrumbs-example.jpg diff --git a/docs/assets/images/mm-browser-mockups.png b/_archive_jekyll/docs/assets/images/mm-browser-mockups.png similarity index 100% rename from docs/assets/images/mm-browser-mockups.png rename to _archive_jekyll/docs/assets/images/mm-browser-mockups.png diff --git a/docs/assets/images/mm-bundle-install.gif b/_archive_jekyll/docs/assets/images/mm-bundle-install.gif similarity index 100% rename from docs/assets/images/mm-bundle-install.gif rename to _archive_jekyll/docs/assets/images/mm-bundle-install.gif diff --git a/docs/assets/images/mm-custom-sidebar-example.jpg b/_archive_jekyll/docs/assets/images/mm-custom-sidebar-example.jpg similarity index 100% rename from docs/assets/images/mm-custom-sidebar-example.jpg rename to _archive_jekyll/docs/assets/images/mm-custom-sidebar-example.jpg diff --git a/docs/assets/images/mm-custom-sidebar-nav.jpg b/_archive_jekyll/docs/assets/images/mm-custom-sidebar-nav.jpg similarity index 100% rename from docs/assets/images/mm-custom-sidebar-nav.jpg rename to _archive_jekyll/docs/assets/images/mm-custom-sidebar-nav.jpg diff --git a/docs/assets/images/mm-customizable-feature.png b/_archive_jekyll/docs/assets/images/mm-customizable-feature.png similarity index 100% rename from docs/assets/images/mm-customizable-feature.png rename to _archive_jekyll/docs/assets/images/mm-customizable-feature.png diff --git a/docs/assets/images/mm-free-feature.png b/_archive_jekyll/docs/assets/images/mm-free-feature.png similarity index 100% rename from 
docs/assets/images/mm-free-feature.png rename to _archive_jekyll/docs/assets/images/mm-free-feature.png diff --git a/docs/assets/images/mm-gh-pages.gif b/_archive_jekyll/docs/assets/images/mm-gh-pages.gif similarity index 100% rename from docs/assets/images/mm-gh-pages.gif rename to _archive_jekyll/docs/assets/images/mm-gh-pages.gif diff --git a/docs/assets/images/mm-github-copy-repo-url.jpg b/_archive_jekyll/docs/assets/images/mm-github-copy-repo-url.jpg similarity index 100% rename from docs/assets/images/mm-github-copy-repo-url.jpg rename to _archive_jekyll/docs/assets/images/mm-github-copy-repo-url.jpg diff --git a/docs/assets/images/mm-github-edit-config.gif b/_archive_jekyll/docs/assets/images/mm-github-edit-config.gif similarity index 100% rename from docs/assets/images/mm-github-edit-config.gif rename to _archive_jekyll/docs/assets/images/mm-github-edit-config.gif diff --git a/docs/assets/images/mm-header-overlay-black-filter.jpg b/_archive_jekyll/docs/assets/images/mm-header-overlay-black-filter.jpg similarity index 100% rename from docs/assets/images/mm-header-overlay-black-filter.jpg rename to _archive_jekyll/docs/assets/images/mm-header-overlay-black-filter.jpg diff --git a/docs/assets/images/mm-header-overlay-custom-filter.jpg b/_archive_jekyll/docs/assets/images/mm-header-overlay-custom-filter.jpg similarity index 100% rename from docs/assets/images/mm-header-overlay-custom-filter.jpg rename to _archive_jekyll/docs/assets/images/mm-header-overlay-custom-filter.jpg diff --git a/docs/assets/images/mm-header-overlay-red-filter.jpg b/_archive_jekyll/docs/assets/images/mm-header-overlay-red-filter.jpg similarity index 100% rename from docs/assets/images/mm-header-overlay-red-filter.jpg rename to _archive_jekyll/docs/assets/images/mm-header-overlay-red-filter.jpg diff --git a/docs/assets/images/mm-home-page-feature.jpg b/_archive_jekyll/docs/assets/images/mm-home-page-feature.jpg similarity index 100% rename from docs/assets/images/mm-home-page-feature.jpg 
rename to _archive_jekyll/docs/assets/images/mm-home-page-feature.jpg diff --git a/docs/assets/images/mm-home-post-pagination-example.jpg b/_archive_jekyll/docs/assets/images/mm-home-post-pagination-example.jpg similarity index 100% rename from docs/assets/images/mm-home-post-pagination-example.jpg rename to _archive_jekyll/docs/assets/images/mm-home-post-pagination-example.jpg diff --git a/docs/assets/images/mm-layout-archive-taxonomy.png b/_archive_jekyll/docs/assets/images/mm-layout-archive-taxonomy.png similarity index 100% rename from docs/assets/images/mm-layout-archive-taxonomy.png rename to _archive_jekyll/docs/assets/images/mm-layout-archive-taxonomy.png diff --git a/docs/assets/images/mm-layout-archive.png b/_archive_jekyll/docs/assets/images/mm-layout-archive.png similarity index 100% rename from docs/assets/images/mm-layout-archive.png rename to _archive_jekyll/docs/assets/images/mm-layout-archive.png diff --git a/docs/assets/images/mm-layout-examples.png b/_archive_jekyll/docs/assets/images/mm-layout-examples.png similarity index 100% rename from docs/assets/images/mm-layout-examples.png rename to _archive_jekyll/docs/assets/images/mm-layout-examples.png diff --git a/docs/assets/images/mm-layout-single-header.png b/_archive_jekyll/docs/assets/images/mm-layout-single-header.png similarity index 100% rename from docs/assets/images/mm-layout-single-header.png rename to _archive_jekyll/docs/assets/images/mm-layout-single-header.png diff --git a/docs/assets/images/mm-layout-single-meta.png b/_archive_jekyll/docs/assets/images/mm-layout-single-meta.png similarity index 100% rename from docs/assets/images/mm-layout-single-meta.png rename to _archive_jekyll/docs/assets/images/mm-layout-single-meta.png diff --git a/docs/assets/images/mm-layout-single.png b/_archive_jekyll/docs/assets/images/mm-layout-single.png similarity index 100% rename from docs/assets/images/mm-layout-single.png rename to _archive_jekyll/docs/assets/images/mm-layout-single.png diff --git 
a/docs/assets/images/mm-layout-splash.png b/_archive_jekyll/docs/assets/images/mm-layout-splash.png similarity index 100% rename from docs/assets/images/mm-layout-splash.png rename to _archive_jekyll/docs/assets/images/mm-layout-splash.png diff --git a/docs/assets/images/mm-masthead-logo.png b/_archive_jekyll/docs/assets/images/mm-masthead-logo.png similarity index 100% rename from docs/assets/images/mm-masthead-logo.png rename to _archive_jekyll/docs/assets/images/mm-masthead-logo.png diff --git a/docs/assets/images/mm-paragraph-indent-example.jpg b/_archive_jekyll/docs/assets/images/mm-paragraph-indent-example.jpg similarity index 100% rename from docs/assets/images/mm-paragraph-indent-example.jpg rename to _archive_jekyll/docs/assets/images/mm-paragraph-indent-example.jpg diff --git a/docs/assets/images/mm-portfolio-collection-example.jpg b/_archive_jekyll/docs/assets/images/mm-portfolio-collection-example.jpg similarity index 100% rename from docs/assets/images/mm-portfolio-collection-example.jpg rename to _archive_jekyll/docs/assets/images/mm-portfolio-collection-example.jpg diff --git a/docs/assets/images/mm-post-date-example.png b/_archive_jekyll/docs/assets/images/mm-post-date-example.png similarity index 100% rename from docs/assets/images/mm-post-date-example.png rename to _archive_jekyll/docs/assets/images/mm-post-date-example.png diff --git a/docs/assets/images/mm-priority-plus-masthead.gif b/_archive_jekyll/docs/assets/images/mm-priority-plus-masthead.gif similarity index 100% rename from docs/assets/images/mm-priority-plus-masthead.gif rename to _archive_jekyll/docs/assets/images/mm-priority-plus-masthead.gif diff --git a/docs/assets/images/mm-read-time-example.jpg b/_archive_jekyll/docs/assets/images/mm-read-time-example.jpg similarity index 100% rename from docs/assets/images/mm-read-time-example.jpg rename to _archive_jekyll/docs/assets/images/mm-read-time-example.jpg diff --git a/docs/assets/images/mm-responsive-feature.png 
b/_archive_jekyll/docs/assets/images/mm-responsive-feature.png similarity index 100% rename from docs/assets/images/mm-responsive-feature.png rename to _archive_jekyll/docs/assets/images/mm-responsive-feature.png diff --git a/docs/assets/images/mm-single-header-example.jpg b/_archive_jekyll/docs/assets/images/mm-single-header-example.jpg similarity index 100% rename from docs/assets/images/mm-single-header-example.jpg rename to _archive_jekyll/docs/assets/images/mm-single-header-example.jpg diff --git a/docs/assets/images/mm-single-header-overlay-example.jpg b/_archive_jekyll/docs/assets/images/mm-single-header-overlay-example.jpg similarity index 100% rename from docs/assets/images/mm-single-header-overlay-example.jpg rename to _archive_jekyll/docs/assets/images/mm-single-header-overlay-example.jpg diff --git a/docs/assets/images/mm-single-header-overlay-fill-example.jpg b/_archive_jekyll/docs/assets/images/mm-single-header-overlay-fill-example.jpg similarity index 100% rename from docs/assets/images/mm-single-header-overlay-fill-example.jpg rename to _archive_jekyll/docs/assets/images/mm-single-header-overlay-fill-example.jpg diff --git a/docs/assets/images/mm-social-share-links-default.png b/_archive_jekyll/docs/assets/images/mm-social-share-links-default.png similarity index 100% rename from docs/assets/images/mm-social-share-links-default.png rename to _archive_jekyll/docs/assets/images/mm-social-share-links-default.png diff --git a/docs/assets/images/mm-social-share-links-reddit-color.png b/_archive_jekyll/docs/assets/images/mm-social-share-links-reddit-color.png similarity index 100% rename from docs/assets/images/mm-social-share-links-reddit-color.png rename to _archive_jekyll/docs/assets/images/mm-social-share-links-reddit-color.png diff --git a/docs/assets/images/mm-social-share-links-reddit-gs.png b/_archive_jekyll/docs/assets/images/mm-social-share-links-reddit-gs.png similarity index 100% rename from 
docs/assets/images/mm-social-share-links-reddit-gs.png rename to _archive_jekyll/docs/assets/images/mm-social-share-links-reddit-gs.png diff --git a/docs/assets/images/mm-staticman-pr-webhook.jpg b/_archive_jekyll/docs/assets/images/mm-staticman-pr-webhook.jpg similarity index 100% rename from docs/assets/images/mm-staticman-pr-webhook.jpg rename to _archive_jekyll/docs/assets/images/mm-staticman-pr-webhook.jpg diff --git a/docs/assets/images/mm-susy-grid-overlay.jpg b/_archive_jekyll/docs/assets/images/mm-susy-grid-overlay.jpg similarity index 100% rename from docs/assets/images/mm-susy-grid-overlay.jpg rename to _archive_jekyll/docs/assets/images/mm-susy-grid-overlay.jpg diff --git a/docs/assets/images/mm-teaser-images-example.jpg b/_archive_jekyll/docs/assets/images/mm-teaser-images-example.jpg similarity index 100% rename from docs/assets/images/mm-teaser-images-example.jpg rename to _archive_jekyll/docs/assets/images/mm-teaser-images-example.jpg diff --git a/docs/assets/images/mm-theme-fork-repo.png b/_archive_jekyll/docs/assets/images/mm-theme-fork-repo.png similarity index 100% rename from docs/assets/images/mm-theme-fork-repo.png rename to _archive_jekyll/docs/assets/images/mm-theme-fork-repo.png diff --git a/docs/assets/images/mm-theme-post-600.jpg b/_archive_jekyll/docs/assets/images/mm-theme-post-600.jpg similarity index 100% rename from docs/assets/images/mm-theme-post-600.jpg rename to _archive_jekyll/docs/assets/images/mm-theme-post-600.jpg diff --git a/docs/assets/images/mm-theme-post-750.jpg b/_archive_jekyll/docs/assets/images/mm-theme-post-750.jpg similarity index 100% rename from docs/assets/images/mm-theme-post-750.jpg rename to _archive_jekyll/docs/assets/images/mm-theme-post-750.jpg diff --git a/docs/assets/images/mm-toc-helper-example.jpg b/_archive_jekyll/docs/assets/images/mm-toc-helper-example.jpg similarity index 100% rename from docs/assets/images/mm-toc-helper-example.jpg rename to 
_archive_jekyll/docs/assets/images/mm-toc-helper-example.jpg diff --git a/docs/assets/images/mm-twitter-card-summary-image.jpg b/_archive_jekyll/docs/assets/images/mm-twitter-card-summary-image.jpg similarity index 100% rename from docs/assets/images/mm-twitter-card-summary-image.jpg rename to _archive_jekyll/docs/assets/images/mm-twitter-card-summary-image.jpg diff --git a/docs/assets/images/mm-twitter-card-summary-large.jpg b/_archive_jekyll/docs/assets/images/mm-twitter-card-summary-large.jpg similarity index 100% rename from docs/assets/images/mm-twitter-card-summary-large.jpg rename to _archive_jekyll/docs/assets/images/mm-twitter-card-summary-large.jpg diff --git a/docs/assets/images/mm-ui-text-labels.jpg b/_archive_jekyll/docs/assets/images/mm-ui-text-labels.jpg similarity index 100% rename from docs/assets/images/mm-ui-text-labels.jpg rename to _archive_jekyll/docs/assets/images/mm-ui-text-labels.jpg diff --git a/docs/assets/images/mstile-144x144.png b/_archive_jekyll/docs/assets/images/mstile-144x144.png similarity index 100% rename from docs/assets/images/mstile-144x144.png rename to _archive_jekyll/docs/assets/images/mstile-144x144.png diff --git a/docs/assets/images/mstile-150x150.png b/_archive_jekyll/docs/assets/images/mstile-150x150.png similarity index 100% rename from docs/assets/images/mstile-150x150.png rename to _archive_jekyll/docs/assets/images/mstile-150x150.png diff --git a/docs/assets/images/mstile-310x150.png b/_archive_jekyll/docs/assets/images/mstile-310x150.png similarity index 100% rename from docs/assets/images/mstile-310x150.png rename to _archive_jekyll/docs/assets/images/mstile-310x150.png diff --git a/docs/assets/images/mstile-310x310.png b/_archive_jekyll/docs/assets/images/mstile-310x310.png similarity index 100% rename from docs/assets/images/mstile-310x310.png rename to _archive_jekyll/docs/assets/images/mstile-310x310.png diff --git a/docs/assets/images/mstile-70x70.png b/_archive_jekyll/docs/assets/images/mstile-70x70.png 
similarity index 100% rename from docs/assets/images/mstile-70x70.png rename to _archive_jekyll/docs/assets/images/mstile-70x70.png diff --git a/docs/assets/images/neon-code-block.jpg b/_archive_jekyll/docs/assets/images/neon-code-block.jpg similarity index 100% rename from docs/assets/images/neon-code-block.jpg rename to _archive_jekyll/docs/assets/images/neon-code-block.jpg diff --git a/docs/assets/images/neon-skin-archive-large.png b/_archive_jekyll/docs/assets/images/neon-skin-archive-large.png similarity index 100% rename from docs/assets/images/neon-skin-archive-large.png rename to _archive_jekyll/docs/assets/images/neon-skin-archive-large.png diff --git a/docs/assets/images/neon-skin-archive.png b/_archive_jekyll/docs/assets/images/neon-skin-archive.png similarity index 100% rename from docs/assets/images/neon-skin-archive.png rename to _archive_jekyll/docs/assets/images/neon-skin-archive.png diff --git a/docs/assets/images/neon-skin-post-large.png b/_archive_jekyll/docs/assets/images/neon-skin-post-large.png similarity index 100% rename from docs/assets/images/neon-skin-post-large.png rename to _archive_jekyll/docs/assets/images/neon-skin-post-large.png diff --git a/docs/assets/images/neon-skin-post.png b/_archive_jekyll/docs/assets/images/neon-skin-post.png similarity index 100% rename from docs/assets/images/neon-skin-post.png rename to _archive_jekyll/docs/assets/images/neon-skin-post.png diff --git a/docs/assets/images/page-header-image.png b/_archive_jekyll/docs/assets/images/page-header-image.png similarity index 100% rename from docs/assets/images/page-header-image.png rename to _archive_jekyll/docs/assets/images/page-header-image.png diff --git a/docs/assets/images/page-header-og-image.png b/_archive_jekyll/docs/assets/images/page-header-og-image.png similarity index 100% rename from docs/assets/images/page-header-og-image.png rename to _archive_jekyll/docs/assets/images/page-header-og-image.png diff --git 
a/docs/assets/images/page-header-overlay-image.png b/_archive_jekyll/docs/assets/images/page-header-overlay-image.png similarity index 100% rename from docs/assets/images/page-header-overlay-image.png rename to _archive_jekyll/docs/assets/images/page-header-overlay-image.png diff --git a/docs/assets/images/page-header-teaser.png b/_archive_jekyll/docs/assets/images/page-header-teaser.png similarity index 100% rename from docs/assets/images/page-header-teaser.png rename to _archive_jekyll/docs/assets/images/page-header-teaser.png diff --git a/docs/assets/images/paragraph-indent.png b/_archive_jekyll/docs/assets/images/paragraph-indent.png similarity index 100% rename from docs/assets/images/paragraph-indent.png rename to _archive_jekyll/docs/assets/images/paragraph-indent.png diff --git a/docs/assets/images/paragraph-no-indent.png b/_archive_jekyll/docs/assets/images/paragraph-no-indent.png similarity index 100% rename from docs/assets/images/paragraph-no-indent.png rename to _archive_jekyll/docs/assets/images/paragraph-no-indent.png diff --git a/docs/assets/images/plum-code-block.jpg b/_archive_jekyll/docs/assets/images/plum-code-block.jpg similarity index 100% rename from docs/assets/images/plum-code-block.jpg rename to _archive_jekyll/docs/assets/images/plum-code-block.jpg diff --git a/docs/assets/images/plum-skin-archive-large.png b/_archive_jekyll/docs/assets/images/plum-skin-archive-large.png similarity index 100% rename from docs/assets/images/plum-skin-archive-large.png rename to _archive_jekyll/docs/assets/images/plum-skin-archive-large.png diff --git a/docs/assets/images/plum-skin-archive.png b/_archive_jekyll/docs/assets/images/plum-skin-archive.png similarity index 100% rename from docs/assets/images/plum-skin-archive.png rename to _archive_jekyll/docs/assets/images/plum-skin-archive.png diff --git a/docs/assets/images/plum-skin-post-large.png b/_archive_jekyll/docs/assets/images/plum-skin-post-large.png similarity index 100% rename from 
docs/assets/images/plum-skin-post-large.png rename to _archive_jekyll/docs/assets/images/plum-skin-post-large.png diff --git a/docs/assets/images/plum-skin-post.png b/_archive_jekyll/docs/assets/images/plum-skin-post.png similarity index 100% rename from docs/assets/images/plum-skin-post.png rename to _archive_jekyll/docs/assets/images/plum-skin-post.png diff --git a/docs/assets/images/safari-pinned-tab.svg b/_archive_jekyll/docs/assets/images/safari-pinned-tab.svg similarity index 100% rename from docs/assets/images/safari-pinned-tab.svg rename to _archive_jekyll/docs/assets/images/safari-pinned-tab.svg diff --git a/docs/assets/images/search-layout-example.png b/_archive_jekyll/docs/assets/images/search-layout-example.png similarity index 100% rename from docs/assets/images/search-layout-example.png rename to _archive_jekyll/docs/assets/images/search-layout-example.png diff --git a/docs/assets/images/site-logo.png b/_archive_jekyll/docs/assets/images/site-logo.png similarity index 100% rename from docs/assets/images/site-logo.png rename to _archive_jekyll/docs/assets/images/site-logo.png diff --git a/docs/assets/images/solarized-light-code-block.jpg b/_archive_jekyll/docs/assets/images/solarized-light-code-block.jpg similarity index 100% rename from docs/assets/images/solarized-light-code-block.jpg rename to _archive_jekyll/docs/assets/images/solarized-light-code-block.jpg diff --git a/docs/assets/images/sunrise-code-block.jpg b/_archive_jekyll/docs/assets/images/sunrise-code-block.jpg similarity index 100% rename from docs/assets/images/sunrise-code-block.jpg rename to _archive_jekyll/docs/assets/images/sunrise-code-block.jpg diff --git a/docs/assets/images/sunrise-skin-archive-large.png b/_archive_jekyll/docs/assets/images/sunrise-skin-archive-large.png similarity index 100% rename from docs/assets/images/sunrise-skin-archive-large.png rename to _archive_jekyll/docs/assets/images/sunrise-skin-archive-large.png diff --git 
a/docs/assets/images/sunrise-skin-archive.png b/_archive_jekyll/docs/assets/images/sunrise-skin-archive.png similarity index 100% rename from docs/assets/images/sunrise-skin-archive.png rename to _archive_jekyll/docs/assets/images/sunrise-skin-archive.png diff --git a/docs/assets/images/sunrise-skin-post-large.png b/_archive_jekyll/docs/assets/images/sunrise-skin-post-large.png similarity index 100% rename from docs/assets/images/sunrise-skin-post-large.png rename to _archive_jekyll/docs/assets/images/sunrise-skin-post-large.png diff --git a/docs/assets/images/sunrise-skin-post.png b/_archive_jekyll/docs/assets/images/sunrise-skin-post.png similarity index 100% rename from docs/assets/images/sunrise-skin-post.png rename to _archive_jekyll/docs/assets/images/sunrise-skin-post.png diff --git a/docs/assets/images/unsplash-gallery-image-1-th.jpg b/_archive_jekyll/docs/assets/images/unsplash-gallery-image-1-th.jpg similarity index 100% rename from docs/assets/images/unsplash-gallery-image-1-th.jpg rename to _archive_jekyll/docs/assets/images/unsplash-gallery-image-1-th.jpg diff --git a/docs/assets/images/unsplash-gallery-image-1.jpg b/_archive_jekyll/docs/assets/images/unsplash-gallery-image-1.jpg similarity index 100% rename from docs/assets/images/unsplash-gallery-image-1.jpg rename to _archive_jekyll/docs/assets/images/unsplash-gallery-image-1.jpg diff --git a/docs/assets/images/unsplash-gallery-image-2-th.jpg b/_archive_jekyll/docs/assets/images/unsplash-gallery-image-2-th.jpg similarity index 100% rename from docs/assets/images/unsplash-gallery-image-2-th.jpg rename to _archive_jekyll/docs/assets/images/unsplash-gallery-image-2-th.jpg diff --git a/docs/assets/images/unsplash-gallery-image-2.jpg b/_archive_jekyll/docs/assets/images/unsplash-gallery-image-2.jpg similarity index 100% rename from docs/assets/images/unsplash-gallery-image-2.jpg rename to _archive_jekyll/docs/assets/images/unsplash-gallery-image-2.jpg diff --git 
a/docs/assets/images/unsplash-gallery-image-3-th.jpg b/_archive_jekyll/docs/assets/images/unsplash-gallery-image-3-th.jpg similarity index 100% rename from docs/assets/images/unsplash-gallery-image-3-th.jpg rename to _archive_jekyll/docs/assets/images/unsplash-gallery-image-3-th.jpg diff --git a/docs/assets/images/unsplash-gallery-image-3.jpg b/_archive_jekyll/docs/assets/images/unsplash-gallery-image-3.jpg similarity index 100% rename from docs/assets/images/unsplash-gallery-image-3.jpg rename to _archive_jekyll/docs/assets/images/unsplash-gallery-image-3.jpg diff --git a/docs/assets/images/unsplash-gallery-image-4-th.jpg b/_archive_jekyll/docs/assets/images/unsplash-gallery-image-4-th.jpg similarity index 100% rename from docs/assets/images/unsplash-gallery-image-4-th.jpg rename to _archive_jekyll/docs/assets/images/unsplash-gallery-image-4-th.jpg diff --git a/docs/assets/images/unsplash-gallery-image-4.jpg b/_archive_jekyll/docs/assets/images/unsplash-gallery-image-4.jpg similarity index 100% rename from docs/assets/images/unsplash-gallery-image-4.jpg rename to _archive_jekyll/docs/assets/images/unsplash-gallery-image-4.jpg diff --git a/docs/assets/images/unsplash-image-1.jpg b/_archive_jekyll/docs/assets/images/unsplash-image-1.jpg similarity index 100% rename from docs/assets/images/unsplash-image-1.jpg rename to _archive_jekyll/docs/assets/images/unsplash-image-1.jpg diff --git a/docs/assets/images/unsplash-image-10.jpg b/_archive_jekyll/docs/assets/images/unsplash-image-10.jpg similarity index 100% rename from docs/assets/images/unsplash-image-10.jpg rename to _archive_jekyll/docs/assets/images/unsplash-image-10.jpg diff --git a/docs/assets/images/unsplash-image-11.jpg b/_archive_jekyll/docs/assets/images/unsplash-image-11.jpg similarity index 100% rename from docs/assets/images/unsplash-image-11.jpg rename to _archive_jekyll/docs/assets/images/unsplash-image-11.jpg diff --git a/docs/assets/images/unsplash-image-2.jpg 
b/_archive_jekyll/docs/assets/images/unsplash-image-2.jpg similarity index 100% rename from docs/assets/images/unsplash-image-2.jpg rename to _archive_jekyll/docs/assets/images/unsplash-image-2.jpg diff --git a/docs/assets/images/unsplash-image-3.jpg b/_archive_jekyll/docs/assets/images/unsplash-image-3.jpg similarity index 100% rename from docs/assets/images/unsplash-image-3.jpg rename to _archive_jekyll/docs/assets/images/unsplash-image-3.jpg diff --git a/docs/assets/images/unsplash-image-4.jpg b/_archive_jekyll/docs/assets/images/unsplash-image-4.jpg similarity index 100% rename from docs/assets/images/unsplash-image-4.jpg rename to _archive_jekyll/docs/assets/images/unsplash-image-4.jpg diff --git a/docs/assets/images/unsplash-image-5.jpg b/_archive_jekyll/docs/assets/images/unsplash-image-5.jpg similarity index 100% rename from docs/assets/images/unsplash-image-5.jpg rename to _archive_jekyll/docs/assets/images/unsplash-image-5.jpg diff --git a/docs/assets/images/unsplash-image-6.jpg b/_archive_jekyll/docs/assets/images/unsplash-image-6.jpg similarity index 100% rename from docs/assets/images/unsplash-image-6.jpg rename to _archive_jekyll/docs/assets/images/unsplash-image-6.jpg diff --git a/docs/assets/images/unsplash-image-7.jpg b/_archive_jekyll/docs/assets/images/unsplash-image-7.jpg similarity index 100% rename from docs/assets/images/unsplash-image-7.jpg rename to _archive_jekyll/docs/assets/images/unsplash-image-7.jpg diff --git a/docs/assets/images/unsplash-image-8.jpg b/_archive_jekyll/docs/assets/images/unsplash-image-8.jpg similarity index 100% rename from docs/assets/images/unsplash-image-8.jpg rename to _archive_jekyll/docs/assets/images/unsplash-image-8.jpg diff --git a/docs/assets/images/unsplash-image-9.jpg b/_archive_jekyll/docs/assets/images/unsplash-image-9.jpg similarity index 100% rename from docs/assets/images/unsplash-image-9.jpg rename to _archive_jekyll/docs/assets/images/unsplash-image-9.jpg diff --git a/docs/screenshot-layouts.png 
b/_archive_jekyll/docs/screenshot-layouts.png similarity index 100% rename from docs/screenshot-layouts.png rename to _archive_jekyll/docs/screenshot-layouts.png diff --git a/docs/screenshot.png b/_archive_jekyll/docs/screenshot.png similarity index 100% rename from docs/screenshot.png rename to _archive_jekyll/docs/screenshot.png diff --git a/favicon.ico b/_archive_jekyll/favicon.ico similarity index 100% rename from favicon.ico rename to _archive_jekyll/favicon.ico diff --git a/image/avatar/zhuxiang-smile.jpg b/_archive_jekyll/image/avatar/zhuxiang-smile.jpg similarity index 100% rename from image/avatar/zhuxiang-smile.jpg rename to _archive_jekyll/image/avatar/zhuxiang-smile.jpg diff --git a/index.html b/_archive_jekyll/index.html similarity index 100% rename from index.html rename to _archive_jekyll/index.html diff --git a/minimal-mistakes-jekyll.gemspec b/_archive_jekyll/minimal-mistakes-jekyll.gemspec similarity index 100% rename from minimal-mistakes-jekyll.gemspec rename to _archive_jekyll/minimal-mistakes-jekyll.gemspec diff --git a/package.json b/_archive_jekyll/package.json similarity index 100% rename from package.json rename to _archive_jekyll/package.json diff --git a/screenshot-layouts.png b/_archive_jekyll/screenshot-layouts.png similarity index 100% rename from screenshot-layouts.png rename to _archive_jekyll/screenshot-layouts.png diff --git a/screenshot.png b/_archive_jekyll/screenshot.png similarity index 100% rename from screenshot.png rename to _archive_jekyll/screenshot.png diff --git a/_archive_jekyll/staticman.yml b/_archive_jekyll/staticman.yml new file mode 100644 index 00000000..61b95925 --- /dev/null +++ b/_archive_jekyll/staticman.yml @@ -0,0 +1,104 @@ +# Name of the property. You can have multiple properties with completely +# different config blocks for different sections of your site. +# For example, you can have one property to handle comment submission and +# another one to handle posts. 
+# To encrypt strings use the following endpoint: +# https://{your Staticman API URL}/v[2|3]/encrypt/{TEXT TO BE ENCRYPTED} + +comments: + # (*) REQUIRED + # + # Names of the fields the form is allowed to submit. If a field that is + # not here is part of the request, an error will be thrown. + allowedFields: ["name", "email", "url", "message"] + + # (*) REQUIRED WHEN USING NOTIFICATIONS + # + # When allowedOrigins is defined, only requests sent from one of the domains + # listed will be accepted. The origin is sent as part as the `options` object + # (e.g. {{ lang.t("meta.comments") }} + + + + + +{% endif %} diff --git a/docs/posts/2018/2018-05-03-setting-up-github-pages-with-custom-domain-over-https.md b/docs/posts/2018/2018-05-03-setting-up-github-pages-with-custom-domain-over-https.md new file mode 100644 index 00000000..b6ba4621 --- /dev/null +++ b/docs/posts/2018/2018-05-03-setting-up-github-pages-with-custom-domain-over-https.md @@ -0,0 +1,105 @@ +--- +authors: +- copdips +categories: +- github +- web +comments: true +date: + created: 2018-05-03 +description: With Github pages, we can create our blogs in our own domain over HTTPS + completely free. Of course you should pay for your domain name at the Registrar. +--- + +# Setting up Github Pages With custom domain over HTTPS + +With Github pages, we can create our blogs in our own domain over HTTPS completely free. Of course you should pay for your domain name at the Registrar. + + + +## Create Github pages on Github.com + +1. On Github create a repo with name : githubUserName.github.io +2. Push a file `index.html` to branch `master` or `gh-pages` +3. Now you can access your github page by going to githubUserName.github.io + +From now on, you've created a fully operational blog on http://githubUserName.github.io, you can also enable HTTPS on it by going to the repo's settings menu, everything is free. 
+ +If you dont need to use a custom domain like http://yourname.com, you can stop here, but if you want it, please go ahead. + +## Register a custom domain + +Register a custom domain on your preferred domain name registrar + +## Setup DNS on DNS registrar + +1. Add subdomain + + + + - Add a **CNAME** DNS record pointing **www** to **copdips.github.io** + - Add a **CNAME** DNS record pointing **blog** to **copdips.github.io** + +1. Add APEX domain + + My DNS registrar doesn't support [ALIAS nor ANAME](https://help.github.com/articles/setting-up-an-apex-domain/#configuring-an-alias-or-aname-record-with-your-dns-provider), I should go with the [A records](https://help.github.com/articles/setting-up-an-apex-domain/#configuring-a-records-with-your-dns-provider) : + + - Add a **A** DNS record pointing **@** to **185.199.108.153** + - Add a **A** DNS record pointing **@** to **185.199.109.153** + - Add a **A** DNS record pointing **@** to **185.199.110.153** + - Add a **A** DNS record pointing **@** to **185.199.111.153** + +## Enable custom domain on Github.com + +1. Go to github repo + + + +1. Add your custom domain in : Settings -> Options -> GitHub Pages -> Custom domain + - If you'll just run a blog on your domain, I suggest to use [`APEX domain`](https://help.github.com/articles/setting-up-an-apex-domain-and-www-subdomain/) name here instead of subdomain, for example: yourdomain.com + - This step creates implicitly a file named **CNAME** under the root of your git repo, the content of the file CNAME is just your custom domain. + - The commit message is 'Create CNAME' +1. On the same page, the option `Enable HTTPS` serves to redirect HTTP traffic to HTTPS. The option is grayed out for the moment, because initially https://yourdomain.github.io is binded with a github's certificate so as to https://youdomain.com. 
In order to secure correctly your new site https://youdomain.com, Github needs to ask [LetsEncrypt](https://letsencrypt.org) to issue a new certificate where the [CN](https://en.wikipedia.org/wiki/Certificate_signing_request) is youdomain.com, than when people visit your web site, they will see [a green padlock](https://support.mozilla.org/en-US/kb/how-do-i-tell-if-my-connection-is-secure) in the address bar. The generation of LetsEncryt certificate takes usually 1 or 2 days, be patient, once you see a green lock when you open https://youdomain.com, you can come back here and enable the option `Enable HTTPS`. + +## Enable HTTPS for custom domain with Cloudflare + +> This solution is `partially deprecated` as [Github supports natively HTTPS for custom domains](#enable-https-for-custom-domain-with-github) now, but Github pages doesn't provide the wildcard certificate yet. For a better compatibility, Cloudflare HTTPS solution is still one of the best choices. + +Some tutorials : +[tutorial 1](https://hackernoon.com/set-up-ssl-on-github-pages-with-custom-domains-for-free-a576bdf51bc) +, +[tutorial 2](https://www.jonathan-petitcolas.com/2017/01/13/using-https-with-custom-domain-name-on-github-pages.html) + +Simplified steps : + +1. Sign up for a free Cloudflare Account +1. Follow the wizard, give your custom domain, Cloudflare should find all your CNAME and A records. +1. Cloudflare should ask you to change your custom domain's default DNS servers given by your DNS registrar to the Cloudflare ones. + - The change may take several hours to take effect + - Cloudflare DNS example: vida.ns.cloudflare.com, zod.ns.cloudflare.com +1. Go to `Crypto` tab, verify SSL is set to Full +1. Go to `Page Rules` tab, add a page rule : http://*customdomain.com/* with `Always Use HTTPS` + +If everything goes well, you can access your custom domain by HTTPS. 
And if you verify the HTTPS certificate, it should be signed by COMODO, the certificate's CN is a cloudflare.com server and one of the SAN is your custom domain. + +## Enable HTTPS for custom domain With Github + +Github announced very recently (on May 01, 2018) [the support of HTTPS for custom domains](https://blog.github.com/2018-05-01-github-pages-custom-domains-https/), this is really a great feature. After the test, I found that the HTTPS certificate is signed by letsencrypt.org where the CN is [your github.io's CNAME](#enable-custom-domain-on-githubcom), and everything is free. Thx Github and LetsEncrypt ! + +You can also enable the HTTP to HTTPS automatic redirection from here. + +If you use subdomain (for ex: www.copdips.com), hereunder the HTTPS tests : + +- typed http://copdips.com, redirected to https://www.copdips.com +- typed http://www.copdips.com, redirected to https://www.copdips.com +- typed https://copdips.com, redirected to the same https://copdips.com with a certificate error, as LetsEncrypt only signed to www.copdips.com in the CN. + + > With [Cloudflare's HTTPS solution](#enable-https-for-custom-domain-with-cloudflare), there's no such error, as Cloudflare signed a wildcard certificate to *.copdips.com in the SAN. 
+ +If you use APEX domain (for ex: copdips.com), hereunder the HTTPS tests : +- typed http://copdips.com, redirected to https://copdips.com +- typed http://www.copdips.com, redirected to https://copdips.com +- typed https://copdips.com, redirected to https://copdips.com +- typed https://www.copdips.com, redirected to https://copdips.com + + > With APEX domain, everything is good on HTTPS with the native Github solution, you don't need Cloudflare diff --git a/docs/posts/2018/2018-05-07-setting-up-powershell-gallery-and-nuget-gallery-for-powershell.md b/docs/posts/2018/2018-05-07-setting-up-powershell-gallery-and-nuget-gallery-for-powershell.md new file mode 100644 index 00000000..ca86a9ba --- /dev/null +++ b/docs/posts/2018/2018-05-07-setting-up-powershell-gallery-and-nuget-gallery-for-powershell.md @@ -0,0 +1,182 @@ +--- +authors: +- copdips +categories: +- powershell +- package +- proxy +comments: true +date: + created: 2018-05-07 +description: Like [pypi](https://pypi.org/) for Python, [npm](https://www.npmjs.com/) + for Node.js, we also have [Powershell Gallery](https://www.powershellgallery.com/) + and Nuget Gallery for Powershell. +--- + +# Setting Up Powershell gallery And Nuget gallery + +Like [pypi](https://pypi.org/) for Python, [npm](https://www.npmjs.com/) for Node.js, we also have [Powershell Gallery](https://www.powershellgallery.com/) for Powershell to add some extra Powershell modules, and [Nuget Gallery](https://www.nuget.org/) for Powershell to add some extra executables. + + + +## Powershell version + +!!! note + + All commands provided here are tested on Windows 10 with Windows Powershell v5.1. + +## Configure proxy in Powershell + +Both Powershell Gallery and Nuget Gallery can be installed locally so that we don't need external Internet access to retrieve the packages from them, but setting up an internal Powershell Gallery or an internal Nuget Gallery is [out of scope of this post](#set-up-internal-powershell-gallery-or-nuget-gallery). 
+ +To use the [public Powershell Gallery](https://www.powershellgallery.com/) or the [public Nuget Gallery](https://www.nuget.org/), you must have Internet access. If you're at the office, your computer is probably behind a company proxy to access Internet. If your Internet Explorer's proxy setting has already been configured, you can use the below command to tell Powershell to reuse the same proxy setting : + +```powershell +(New-Object -TypeName System.Net.WebClient).Proxy.Credentials = [System.Net.CredentialCache]::DefaultNetworkCredentials + +# Or batch version by using netsh (need admin privileges) : +netsh winhttp show proxy +netsh winhttp import proxy source=ie +``` + +I suggest to add the above command in your powershell profile, otherwise you should type it each time you open a new Powershell session. + +Your Windows Powershell profile can be found at four locations: + +```powershell +$PROFILE | gm | ? membertype -eq noteproperty +``` + +The output of the above command : + +```powershell +# For Windows Powershell : + + TypeName:System.String + +Name MemberType Definition +---- ---------- ---------- +AllUsersAllHosts NoteProperty string AllUsersAllHosts=C:\Windows\System32\WindowsPowerShell\v1.0\profile.ps1 +AllUsersCurrentHost NoteProperty string AllUsersCurrentHost=C:\Windows\System32\WindowsPowerShell\v1.0\Microsoft.PowerShell_profile.ps1 +CurrentUserAllHosts NoteProperty string CurrentUserAllHosts=d:\xiang\Documents\WindowsPowerShell\profile.ps1 +CurrentUserCurrentHost NoteProperty string CurrentUserCurrentHost=d:\xiang\Documents\WindowsPowerShell\Microsoft.PowerShell_profile.ps1 +``` + +The two CurrentUser profile locations might differ on different computers, all depends on your `MyDocuments` location ( `[Environment]::GetFolderPath("MyDocuments")` ), and if you're using Powershell Core, all the four locations are different than the ones in Windows Powershell. 
+I usually use `CurrentUserAllHosts` because the change will only affect my profile, and even if I'm not the admin of the computer, I can still get it working. The profile location can be found at : + +```powershell +$PROFILE | % CurrentUserAllHosts +``` + +Add the proxy setting at the end of your `CurrentUserAllHosts` powershell profile : + +```powershell +Add-Content ($PROFILE | % CurrentUserAllHosts) "`n(New-Object -TypeName System.Net.WebClient).Proxy.Credentials = [System.Net.CredentialCache]::DefaultNetworkCredentials`n" +``` + +As a best practice, it would be better to add the above line at the top of your profile. + +## Set up Powershell Gallery for Powershell + +This is pretty easy for Powershell v5+ : + +```powershell +# I add the switch Trusted because I trust all the modules and scripts from Powershell Gallery +Register-PSRepository -Default -InstallationPolicy Trusted +``` + +For Powershell with version less than v5: + +```powershell +Register-PSRepository -Name PSGallery -SourceLocation https://www.powershellgallery.com/api/v2/ -InstallationPolicy Trusted +``` + +Test : +```powershell +> Get-PSRepository + +Name InstallationPolicy SourceLocation +---- ------------------ -------------- +PSGallery Trusted https://www.powershellgallery.com/api/v2/ +``` + +## Use Powershell Gallery + +```powershell +# Search a module which name is like poshrs* +> find-module poshrs* + +Name Version Source Summary +---- ------- ------ ------- +PoshRSJob 1.7.4.4 PSGallery Provides an alternative to PSjobs with greater performance and less overhead to run commands in ... + +# Install the module without admin privileges +> find-module poshrs* | install-module -Scope CurrentUser +``` + +## Set up Nuget for Powershell + +[Nuget](https://docs.microsoft.com/en-us/nuget/) is well-known among the Windows developers. 
+ +```powershell +# I also add the Trusted switch +Register-PackageSource -Name Nuget -Location "http://www.nuget.org/api/v2" –ProviderName Nuget -Trusted +``` + +My Nuget client is at v2, so I can only target at Nuget v2 API. + +```powershell +> Get-PackageProvider + +Name Version DynamicOptions +---- ------- -------------- +msi 3.0.0.0 AdditionalArguments +msu 3.0.0.0 +NuGet 2.8.5.208 Destination, ExcludeVersion, Scope, SkipDependencies, Headers, FilterOnTag, ... +PowerShellGet 1.0.0.1 PackageManagementProvider, Type, Scope, AllowClobber, SkipPublisherCheck, In... +Programs 3.0.0.0 IncludeWindowsInstaller, IncludeSystemComponent +``` + +Test : + +```powershell +> Get-PackageSource + +Name ProviderName IsTrusted Location +---- ------------ --------- -------- +Nuget NuGet True http://www.nuget.org/api/v2 +PSGallery PowerShellGet True https://www.powershellgallery.com/api/v2/ +``` + +## Use Nuget + +```powershell +# install the latest version of GitForWindows without admin privileges +find-package gitforwindows | install-package -Scope CurrentUser + +# install the latest version of Python without admin privileges +find-package python | install-package -Scope CurrentUser + +# find the path of Python installation +get-package python | % source + +# You need to add manually the package executable path to your USER PATH. +# To get the current USER Path +[System.Environment]::GetEnvironmentVariable('Path', 'User') + +# To set the current USER Path +[System.Environment]::SetEnvironmentVariable('Path', $newPathInSingleStringSeparatedByColumn, 'User') +``` + +> In fact, you can find out from the output of `Get-PackageSource` that `Find-Package` can search the packages and modules in both Nuget Gallery and Powershell Gallery. + +## Set up internal Powershell Gallery or Nuget Gallery + +Some resources on setting up internal Powershell Gallery and Nuget Gallery: + +1. 
[Setting up an Internal PowerShellGet Repository](https://blogs.msdn.microsoft.com/powershell/2014/05/20/setting-up-an-internal-powershellget-repository/) +1. [Powershell: Your first internal PSScript repository](https://kevinmarquette.github.io/2017-05-30-Powershell-your-first-PSScript-repository/) +1. [PowerShell/PSPrivateGallery](https://github.com/PowerShell/PSPrivateGallery) +1. [Overview of Hosting Your Own NuGet Feeds](https://docs.microsoft.com/en-us/nuget/hosting-packages/overview) +1. [NuGet/NuGetGallery](https://github.com/NuGet/NuGetGallery/wiki/Hosting-the-NuGet-Gallery-Locally-in-IIS) diff --git a/docs/posts/2018/2018-05-16-powershell-stop-parsing.md b/docs/posts/2018/2018-05-16-powershell-stop-parsing.md new file mode 100644 index 00000000..816aa77e --- /dev/null +++ b/docs/posts/2018/2018-05-16-powershell-stop-parsing.md @@ -0,0 +1,133 @@ +--- +authors: +- copdips +categories: +- powershell +comments: true +date: + created: 2018-05-16 +description: Use Powershell stop-parsing (`--%`) to treat the remaining characters + in the line as a literal. +--- + +# Powershell stop-parsing (`--%`) + +A friend of mine told me about the Powershell stop-parsing (\-\-%) last year, he said the stop-parsing tells powershell to treat the remaining characters in the line as a literal, but I'd never known where to use it. Recently working on git ssh made it happen. + + + +The use case is I needed to git push by using a ssh key instead of the https wincred. So at first I needed to generate a ssh key pair. I used the `ssh-keygen.exe` provided by [GitForWindows](https://copdips.com/2018/05/setting-up-powershell-gallery-and-nuget-gallery-for-powershell.html#use-nuget). + +To generate a ssh key pair from Powershell : + +```powershell +> ssh-keygen.exe -t rsa -b 4096 -C "your_email@example.com" + +Generating public/private rsa key pair. 
+Enter file in which to save the key (/c/Users/xiang/.ssh/id_rsa): +nter passphrase (empty for no passphrase): +Enter same passphrase again: +Your identification has been saved in /c/Users/xiang/.ssh/id_rsa. +Your public key has been saved in /c/Users/xiang/.ssh/id_rsa.pub. +The key fingerprint is: +SHA256:msbOTbVaHD2W3BNBmhxHkpJ7FWhdLhzFWj8Q0IDAiU0 xiang.zhu@outlook.com +The key's randomart image is: ++---[RSA 4096]----+ +| =Eo .+*BO=o| +| . + .o.+Xo+o| +| ++.=oo| +| .o.o.+.| +| S o.* o .| +| . o o + . . | +| = . + | +| + o o | +| o o | ++----[SHA256]-----+ +``` + +Press twice enter will create the key pair (id_rsa and id_rsa.public) without passphrase in the default Windows SSH keys' location `Join-Path $env:HOMEDRIVE $env:HOMEPATH | Join-Path -ChildPath .ssh` which is at `C:\Users\xiang\.ssh` on my computer. + +It is highly recommended to secure your SSH key by a passphrase : `ssh-keygen -N 'yourPassphraseHere'`. + + + +Everything works well till now, and the command ssh-keygen is easy to use. But the real use case is to generate the ssh key pair on a remote Windows server. I thought it should be easy too. Just install GitForWindows on the remote Windows server, add the Git paths to the user's env PATH (I'm not admin on the remote server), and run the same ssh-keygen command ? Imagination remains imagination, let's see the real world : + +```powershell +[RemoteServer]: PS> ssh-keygen.exe -t rsa -b 4096 -C "your_email@example.com" +Generating public/private rsa key pair. +Enter file in which to save the key (/c/Users/Administrator/.ssh/id_rsa): +[RemoteServer]: PS> +[RemoteServer]: PS>Test-Path C:\Users\Administrator\.ssh +False +[RemoteServer]: PS> +``` + +Hmm... it seems that remote PsSession doesn't support ssh-keygen's interactive prompt dialog. It closed the prompt without giving me the chance to talk with ssh-keygen. 
Never mind, `ssh-keygen --help` shows me what is the one-line command without prompt : + +```powershell +# param --help doesnt exist +> ssh-keygen --help +ssh-keygen: unknown option -- - +usage: ssh-keygen [-q] [-b bits] [-t dsa | ecdsa | ed25519 | rsa] + [-N new_passphrase] [-C comment] [-f output_keyfile] + ssh-keygen -p [-P old_passphrase] [-N new_passphrase] [-f keyfile] + ... +``` + +The first one from the above help file seems good, let's try it out : + +```powershell +[RemoteServer]: PS> ssh-keygen -q -t rsa -b 4096 -N '' -C 'xiang.zhu@outlook.com' -f C:\Users\xiang\.ssh\id_rsa + +Too many arguments. +usage: ssh-keygen [-q] [-b bits] [-t dsa | ecdsa | ed25519 | rsa] + [-N new_passphrase] [-C comment] [-f output_keyfile] + ... +``` + +Still failed, but this time it threw `Too many arguments` error, very strange, all the arguments are valid as per ssh-keygen's help. + + Searched on Google, finally found that someone raised already [an issue](https://github.com/PowerShell/Win32-OpenSSH/issues/1017) on PowerShell/Win32-OpenSSH github repo. It is because Powershell thinks `-f` is a powershell native parameter for [`parsing`](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_parsing?view=powershell-6). 
+ +For example, to parse a DateTime to a sortable string : + +```powershell +> '{0:s}' -f (Get-Date) +2018-05-15T20:41:55 +``` + +So I added the stop-parsing symbol `--%` just after ssh-keygen.exe, and my ssh keys are managed to be created : +```powershell +[RemoteServer]: PS> ssh-keygen.exe --% -q -t rsa -b 4096 -N '' -C 'xiang.zhu@outlook.com' -f C:\Users\administrator\.ssh\id_rsa +ssh-keygen.exe : Saving key "C:\\Users\\administrator\\.ssh\\id_rsa" failed: No such file or directory + + CategoryInfo : NotSpecified: (Saving key "C:\...le or directory:String) [], RemoteException + + FullyQualifiedErrorId : NativeCommandError + +# I need to create the folder .ssh in advance +[RemoteServer]: PS> md C:\Users\administrator\.ssh + + Directory: C:\Users\administrator + +Mode LastWriteTime Length Name +---- ------------- ------ ---- +d----- 5/15/2018 8:35 PM .ssh + +[RemoteServer]: PS> ssh-keygen.exe --% -q -t rsa -b 4096 -N '' -C 'xiang.zhu@outlook.com' -f C:\Users\administrator\.ssh\id_rsa +[RemoteServer]: PS> gci C:\Users\Administrator\.ssh + + Directory: C:\Users\Administrator\.ssh + +Mode LastWriteTime Length Name +---- ------------- ------ ---- +-a---- 5/15/2018 8:36 PM 3243 id_rsa +-a---- 5/15/2018 8:36 PM 747 id_rsa.pub + +[RemoteServer]: PS> +``` + +Some references on stop-parsing (not many resources on Internet): + +- https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_parsing?view=powershell-6 +- https://ss64.com/ps/stop-parsing.html +- https://rkeithhill.wordpress.com/2012/01/02/powershell-v3-ctp2-provides-better-argument-passing-to-exes/ diff --git a/docs/posts/2018/2018-05-19-setting-up-jekyll-with-minimal-mistakes-theme-on-windows.md b/docs/posts/2018/2018-05-19-setting-up-jekyll-with-minimal-mistakes-theme-on-windows.md new file mode 100644 index 00000000..5fb77572 --- /dev/null +++ b/docs/posts/2018/2018-05-19-setting-up-jekyll-with-minimal-mistakes-theme-on-windows.md @@ -0,0 +1,288 @@ +--- +authors: +- copdips 
+categories: +- jekyll +- web +comments: true +date: + created: 2018-05-19 + updated: 2022-08-14 +description: Preview Jekyll blog locally on Windows with the Minimal Mistakes theme. +--- + +# Setting up Jekyll with Minimal Mistakes theme on Windows + +Do you want to preview Jekyll blog locally on Windows before publishing it to the Internet? Many online tutorials about setting up Jekyll on Windows are out of date, I will show you in this post the 2018 version and with the Minimal Mistakes theme. + + + +## Some online tutorials + +- +- +- + +## Install Ruby and Devkit on Windows + +Jekyll is written in Ruby, to preview Jekyll blog content, we need to install Ruby and Ruby DevKit. + +Which Development Kit? + + + +> + rubyinstaller.org: Starting with Ruby 2.4.0 we use the MSYS2 toolchain as our development kit. When using the Ruby+Devkit installer version, it is a selectable component, so that no additional downloads/installs are required. + + + +> + When using the Ruby without Devkit version, the MSYS2 Devkit can be installed separately by running ridk install. MSYS2 is required to build native C/C++ extensions for Ruby and is necessary for Ruby on Rails. Moreover it allows the download and usage of hundreds of Open Source libraries which Ruby gems can depend on. + + + +Download and install the **Ruby+DevKit** from the **with Devkit** part of the following downloads page: + + +## Install Jekyll Ruby package and its dependencies + +Ruby uses [gem](https://rubygems.org/) to install the Ruby packages. + +Change gem source if default banned in China: + +```ruby +gem sources --add https://ruby.taobao.org/ --remove https://rubygems.org/ +``` + +To install the basic Jekyll environment, open a Powershell console: + +```ruby +> gem install bundler +> gem install jekyll +... 
+Done installing documentation for public_suffix, addressable, colorator, http_parser.rb, eventmachine, em-websocket, concurrent-ruby, i18n, rb-fsevent, ffi, rb-inotify, sass-listen, sass, jekyll-sass-converter, ruby_dep, listen, jekyll-watch, kramdown, liquid, mercenary, forwardable-extended, pathutil, rouge, safe_yaml, jekyll after 55 seconds +25 gems installed +``` + +## Choose a theme + +Googling will give you many Jekyll theme, this blog is using the [**minimal-mistakes theme**](https://mmistakes.github.io/minimal-mistakes/about/), + +By using the procedure provided by the [quick start guide](https://mmistakes.github.io/minimal-mistakes/docs/quick-start-guide/) of the minimal mistakes theme, we can install all the Jekyll dependencies + +## Customize the theme + +### The _config.yml file + +All the global configurations are set here, this is your starting point + +### Add Disqus comment system + +1. Create an account on +2. Create a shortname on : +3. Edit file `_config.yml` + +```yml +comments: + provider : "disqus" # false (default), "disqus", "discourse", "facebook", "google-plus", "staticman", "staticman_v2" "custom" + disqus: + shortname : "the shortname created in step 2" +``` + +If you want to enable comment system by default on all the blog posts, set `comments` in defaults part of _config.yml to `true` : +```yml +# Defaults +defaults: + # _posts + - scope: + path: "" + type: posts + values: + layout: single + author_profile: true + read_time: true + comments: true + share: true + related: true +``` + +## Default page layout + +In _config.yml, I chose `single` as my post default layout style. + +The layout can be found at : `_layouts\single.html` + +### Add update date in each post under the post title + +Add `last_modified_at:` in the post headers. 
+ +### Per page layout + +On the top of the post, you can add your [YAML Front Matter](https://jekyllrb.com/docs/frontmatter/): +```yml +--- +layout: single +title: "Setting Up Powershell Gallery And Nuget Gallery" # title shown in home page +excerpt: "As like [pypi](https://pypi.org/) for Python, [npm](https://www.npmjs.com/) for Node.js, we also have [Powershell Gallery](https://www.powershellgallery.com/) for Powershell to add some extra Powershell modules, and [Nuget Gallery](https://www.nuget.org/) for Powershell to add some extra executables." # excerpt shown in home page under title +permalink: # global permalink is set in_config.yml +tags: + - nuget + - powershell + - powershell gallery + - proxy +published: true +comments: true +author_profile: true +# header: +# teaserlogo: +# teaser: '' +# image: '' +# caption: +gallery: + - image_path: '' + url: '' + title: '' +--- +``` + +### Homepage + +The homepage is defined by : `_layouts\home.html`, and it uses `_includes\archive-single.html` as its default content + +### Navigation + +To customize the navigation bar on top of the blog: `_data\navigation.yml`, for example, I added the `Home` menu : + +```yml +# main links +main: + # - title: "Quick-Start Guide" + # url: https://mmistakes.github.io/minimal-mistakes/docs/quick-start-guide/ + # - title: "About" + # url: https://mmistakes.github.io/minimal-mistakes/about/ + # - title: "Sample Posts" + # url: /year-archive/ + # - title: "Sample Collections" + # url: /collection-archive/ + # - title: "Sitemap" + # url: /sitemap/ + - title: "Home" + url: / +``` + +The `Search` menu in the navigation bar is set by the `search` option in the global `_config.yml` file, the default value is false which disables the Search menu : + +```yml +search : true # true, false (default) +``` + +### Add notice (Primary, Info, Success, Warning, Danger) + +Append a new line under the text bloc, and insert the notice tag there : +- + +Other external notice methods : +- +- + +### Larger 
width + +The `$x-large` size defined in the file `_Variables.scss` is set at `1280px`, which is good as per the maintainer's idea of in favor of the [readability](https://github.com/mmistakes/minimal-mistakes/issues/2214#issuecomment-1087605993), but is still too narrow for me, I have large 34" screen, and I like the width https://docs.microsoft.com/, so just set `$x-large: 1520px !default;` to have similar size as Microsoft docs. + +## Write a post + +All Jekyll posts should be written in [markdown .md](https://en.wikipedia.org/wiki/Markdown) or HTML formats, and Jekyll uses Ruby's [Kramdown](https://kramdown.gettalong.org/) as its default markdown converter. + +You can also use other formats for post files, but you should provide the corresponding convertor. If you want to host your Jekyll blog on the Github Pages, it is suggested to use Kramdown because Github Pages has its own white list of the Jekyll plugins, your convertor plugin might not be available on Github Pages, so your post won't be displayed correctly as expected. + + + +All post files should be put into the `_posts` folder, Jekyll requires blog post files to be named according to the following format: + +```bash +YEAR-MONTH-DAY-title.MARKUP + +# examples: +2011-12-31-new-years-eve-is-awesome.md +2012-09-12-how-to-write-a-blog.md +``` + +You don't need to put all the files under the root of _posts folder, you can also use year and month as the sub folder name : + +```powershell +> tree ./_posts /f + +D:\XIANG\GIT\COPDIPS.GITHUB.IO\_POSTS +└─2018 + 2018-05-03-setting-up-github-pages-with-custom-domain-over-https.md + 2018-05-07-setting-up-powershell-gallery-and-nuget-gallery-for-powershell.md + 2018-05-16-powershell-stop-parsing.md +``` + +## Write a draft + +Jekyll draft files should be saved into `_drafts` folder. The files in this folder won't be displayed. 
+ +## Define the post url + +The default post URL is `https://yourdomain/post-name` + +If you want to customize it, edit `permalink` in the `_config.yml` file, I'm using the following format : + +```yml +permalink: /:year/:month/:title.html +``` + +## Change the post skin look + +The Jekyll post is using the Minimal Mistake theme, so the post skin is defined by the `minimal_mistakes_skin` option in `_config.yml` file. + +All skin look related files can be found in `_sass` folder, for example : + +- _air.scss (This blog is using air skin) +- _base.scss +- _footer.scss +- _sidebar.scss +- etc. + +## Preview the blog locally on Windows + +From Powershell console : + +```powershell +> bundle exec jekyll serve -w + +Configuration file: D:/xiang/git/copdips.github.io/_config.yml + Source: D:/xiang/git/copdips.github.io + Destination: D:/xiang/git/copdips.github.io/_site + Incremental build: disabled. Enable with --incremental + Generating... + done in 6.534 seconds. + Please add the following to your Gemfile to avoid polling for changes: + gem 'wdm', '>= 0.1.0' if Gem.win_platform? + Auto-regeneration: enabled for 'D:/xiang/git/copdips.github.io' + Server address: http://127.0.0.1:4000 + Server running... press ctrl-c to stop. +``` + +The outputs tell that you can visit your site from : [http://127.0.0.1:4000](http://127.0.0.1:4000) + +Unless you modify the `_config.yml` file, all the other modifications can automatically trigger the regeneration of the blog pages, and after just refreshing your blog page in the browser, you can read the new version right away. But any modification in _config.yml needs the relaunch of the `bundle exec jekyll serve -w` command to see the result. + +## Add non-whitelisted plugins (gems) + +GitHub Pages runs in `safe` mode and only allows [a set of whitelisted plugins](https://pages.github.com/versions/). To use the gem in GitHub Pages, one of the workarounds is to use CI (e.g.
travis, github workflow) and deploy to your `gh-pages` branch like: [jekyll-deploy-action](https://github.com/jeffreytse/jekyll-deploy-action), and I use this plugin: [jekyll-spaceship](https://github.com/jeffreytse/jekyll-spaceship) in my github pages. + +## Using mermaid in github pages + +Above `jekyll-spaceship` plugin can render the mermaid code but not very well as described [here](https://github.com/jeffreytse/jekyll-spaceship/issues/60). + +Currently, there're two better solutions by using the [mermaid javascript API](https://mermaid-js.github.io/mermaid/#/n00b-gettingStarted?id=_3-calling-the-javascript-api). + +The **first solution** is to use the mermaid API directly, it's inspired by this [post](https://jojozhuang.github.io/tutorial/jekyll-diagram-with-mermaid/). You can refer to this [commit](https://github.com/copdips/copdips.github.io/commit/6e9fde29abff7691ccfd7b7b0ad7158651931ed5) to see how to use it. The steps are as follows: + +1. create a file `mermaid.html` inside the folder `_includes`. The file content could be found on the [mermaid js official website](https://mermaid-js.github.io/mermaid/#/n00b-gettingStarted?id=_3-calling-the-javascript-api). +2. update the file `_includes/head.html` to include the new file `mermaid.html` with or without the condition on the var `page.mermaid` +3. in post where we need to render the mermaid diagrams, just put the code in side a html div block by set the class to `mermaid` like: `
`. If the step 2 has set a condition on the var `page.mermaid`, you need to also add a variable named `mermaid` and set its value to `true` in the post header. + +The **second solution** is to install the gem plugin [jekyll-mermaid](https://github.com/jasonbellamy/jekyll-mermaid) where the underlying implementation uses the mermaid API too. This is what I'm using as per this [commit](https://github.com/copdips/copdips.github.io/commit/61af59a1ed4b18c392b1eeed1dbcb9293c8be650), it's a little bit easier than the first solution. diff --git a/docs/posts/2018/2018-05-22-using-readline-in-python-repl-on-windows.md b/docs/posts/2018/2018-05-22-using-readline-in-python-repl-on-windows.md new file mode 100644 index 00000000..d4868010 --- /dev/null +++ b/docs/posts/2018/2018-05-22-using-readline-in-python-repl-on-windows.md @@ -0,0 +1,69 @@ +--- +authors: +- copdips +categories: +- python +comments: true +date: + created: 2018-05-22 +description: Use PyReadline and PtPython to have the powerful Readline in Python REPL + on Windows OS. +--- + +# Using Readline In Python REPL On Windows With PyReadline and PtPython + +As an ex-sysadmin, I'm in love with the [Readline](https://en.wikipedia.org/wiki/GNU_Readline). In Powershell, we have its variation [PSReadline](https://github.com/lzybkr/PSReadLine). In Python REPL on Windows OS, I'll show you the [PyReadline](https://pythonhosted.org/pyreadline/) and the [PtPython](https://github.com/jonathanslenders/ptpython).
+ + + +## PyReadline + +When you search on Internet, you will find many tutorials telling you to install a Python module called [readline](https://pypi.org/project/readline/), but unfortunately, it's not compatible on Windows OS : + +```python +> pip install readline + +Collecting readline + Using cached https://files.pythonhosted.org/packages/f4/01/2cf081af8d880b44939a5f1b446551a7f8d59eae414277fd0c303757ff1b/readline-6.2.4.1.tar.gz + Complete output from command python setup.py egg_info: + error: this module is not meant to work on Windows +``` + +On Windows, the counterpart is PyReadline, install it by : +```powershell +pip install pyreadline +``` + +Here are the features of PyReadline : +- keyboard text selection and copy/paste +- Shift-arrowkeys for text selection +- Control-c can be used for copy activate with allow_ctrl_c(True) in config file +- Double tapping ctrl-c will raise a KeyboardInterrupt, use ctrl_c_tap_time_interval(x) where x is your preferred tap time window, default - 0.3 s. +- paste pastes first line of content on clipboard. +- ipython_paste, pastes tab-separated data as list of lists or numpy array if all data is numeric +- paste_mulitline_code pastes multi line code, removing any empty lines. + +!!! warning + + PyReadline was used by IPython, but since it hasn't been maintained since 2015, IPython [removed it](https://github.com/ipython/ipython/blob/60f802938467731f555f694514e6592288455a1c/docs/source/whatsnew/version5.rst#new-terminal-interface), and replaced it by [prompt_toolkit](http://python-prompt-toolkit.readthedocs.io/en/stable/). + +As PyReadline must be used inside Python REPL, you need to type `import PyReadline` from the very beginning of the Python REPL. 
To be a lazy devops, just add the import into a file and let Python source it before the first prompt is displayed by using [$env:PYTHONSTARTUP](https://docs.python.org/3/using/cmdline.html#envvar-PYTHONSTARTUP) : + +```powershell +# In powershell console +Add-Content $env:USERPROFILE/.pythonrc "`nimport PyReadline" +$env:PYTHONSTARTUP = "$env:USERPROFILE/.pythonrc" +``` + +## PtPython + +Previous chapter mentioned that PyReadline is no longer maintained, so here comes the [PtPython](https://github.com/jonathanslenders/ptpython). + +PtPython is an interactive Python Shell, built on top of [prompt_toolkit](https://github.com/jonathanslenders/python-prompt-toolkit) written by the same author [Jonathan Slenders](https://github.com/jonathanslenders). + +Install PtPython by : +```powershell +pip install ptpython +``` + +Start it by simply typing : `ptpython` from the terminal, it will start a Python REPL with prompt_toolkit integrated, nothing to set on $env:USERPROFILE diff --git a/docs/posts/2018/2018-05-26-grep-like-powershell-colorful-select-string.md b/docs/posts/2018/2018-05-26-grep-like-powershell-colorful-select-string.md new file mode 100644 index 00000000..86f63f7f --- /dev/null +++ b/docs/posts/2018/2018-05-26-grep-like-powershell-colorful-select-string.md @@ -0,0 +1,463 @@ +--- +authors: +- copdips +categories: +- powershell +- file +comments: true +date: + created: 2018-05-26 + updated: 2019-12-31 +description: Select-String with color, make Select-String of Powershell to highlight + the search pattern like grep in Unix. +--- + +# Select-ColorString : A Unix's grep-like Powershell Cmdlet Based On Select-String With Color + +## Update 2019-12-28 Powershell 7 Select-String default highlighting + +Update 2019-12-28: It's very exciting to see that **since [Powershell 7](https://github.com/PowerShell/PowerShell/pull/8963), the Select-String has highlighting (internal name: emphasis) by default**.
It uses similar way (index, length) to find and highlight the matches. The emphasis uses negative colors based on your PowerShell background and text colors. To [disable the emphasis](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.utility/select-string?view=powershell-7), use the `-NoEmphasis` switch. **So I highly recommend everyone to switch to Powershell 7 ([RC is supported by Microsoft](https://devblogs.microsoft.com/powershell/announcing-the-powershell-7-0-release-candidate/))**, it has also many other new powerful features. + +BTW, in Powershell 7, Select-String `-AllMatches` is set as $false by default. I think it would be nice to have an inverse switch -NoAllMatches just like -NoEmphasis, and let -AllMatches to be $true by default. + +**Update 2019-12-31**: I just found [a workaround here](https://github.com/PowerShell/PowerShell/issues/11447#issuecomment-569854982), by specifying `$PSDefaultParameterValues['Select-String:AllMatches'] = $true` in the Profile.ps1. I don't know if you have the same feeling as the mine, this feature is killing, it will help me for many other things :) + +Powershell 7 Select-String default highlighting demo: + +![](../../assets/blog_images/2018-05-26-grep-like-powershell-colorful-select-string/powershell7-default-highlighting.png) + +The original post before the Emphasis has been introduced in Powershell 7: + +> [Select-String](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.utility/select-string?view=powershell-6) in Powershell is a very powerful cmdlet to search a string or a pattern in input or files. It's very much like the famous command-line [grep](https://www.gnu.org/savannah-checkouts/gnu/grep/manual/grep.html) in Unix. But from my personal point of view, it's a little bit pity that Select-String doesn't highlight the matching patterns, so I will show you in this post how to make it possible (more or less) with Select-ColorString. 
+ +## Trace-Word + +First of all, I must mention another powershell cmdlet [Trace-Word that I read on Prateek Singh's blog ridicurious.com](https://ridicurious.com/2018/03/14/highlight-words-in-powershell-console/). + +Let me show you a screenshot of his Trace-Word to let you have an idea about what it can do: + +![](../../assets/blog_images/2018-05-26-grep-like-powershell-colorful-select-string/trace-word-screenshot.png) + +Indeed, I was deeply impressed when I read his post, the color in Powershell string search results had been one of my most expected Powershell functionalities. Prateek Singh made it, thanks! + +When I checked the code source of Trace-Word, I found the cmdlet logic is: + +1. Firstly reads the input content line by line: + + ```powershell + $content | ForEach-Object {...} + ``` + +1. And then splits each line by white-space: + ```powershell + `$_.split() | Where-Object { + -not [string]::IsNullOrWhiteSpace($_) + } | ForEach-Object{...} + ``` + +1. At last checks each splitted token against the searching words: + ```powershell + if($Token -like "*$Word*") { + $before, $after = $Token -Split "$Word"; + ... + } + ``` + +1. Now we have `$before, $Word, $after`, so just need to `Write-Host $Word with color` to highlight the wanted $Word. + +That's done, pretty cool and quite straightforward, nothing complicated, I like it so much. + +I contacted Prateek to ask if I can use his idea to write something similar but with another method, he said YES and that comes my Select-ColorString, thanks Prateek again. + +## Select-ColorString + +Although Prateek Singh's Trace-Word is wonderful enough, I still want a bit more capabilities: the regex and the customizable color choice. + +The first thing that I thought about the regex is `Select-String` which I'm using almost everyday with sls. 
+ +> Sometimes I was obliged to use the DOS command-line `findstr` due to that Select-String catches the input too earlier before it is been displayed a pure string on console screen. But findstr just finds what you want among what is shown on the screen. Although `$input | Out-String | Select-String` might solve the issue sometimes but it's not sexy to use 2 cmdlets to do one single task and sometimes this workaround even doesn't work. + +Powershell Select-String returns some `MatchInfo` objects, from its MemberType, the `Matches` property is what I will use to color the matching patterns. The `Index` key gives the index of the first char of the matching pattern in a given line string, with that I know from where I could Write-Host with color. + +```powershell +PS> 'a is good, b is good too' | sls good -AllMatches | gm + + + TypeName:Microsoft.PowerShell.Commands.MatchInfo + +Name MemberType Definition +---- ---------- ---------- +Equals Method bool Equals(System.Object obj) +GetHashCode Method int GetHashCode() +GetType Method type GetType() +RelativePath Method string RelativePath(string directory) +ToString Method string ToString(), string ToString(string directory) +Context Property Microsoft.PowerShell.Commands.MatchInfoContext Context {get;set;} +Filename Property string Filename {get;} +IgnoreCase Property bool IgnoreCase {get;set;} +Line Property string Line {get;set;} +LineNumber Property int LineNumber {get;set;} +Matches Property System.Text.RegularExpressions.Match[] Matches {get;set;} +Path Property string Path {get;set;} +Pattern Property string Pattern {get;set;} + + +PS> 'a is good, b is good too' | sls good -AllMatches | % matches + + +Groups : {0} +Success : True +Name : 0 +Captures : {0} +Index : 5 +Length : 4 +Value : good + +Groups : {0} +Success : True +Name : 0 +Captures : {0} +Index : 16 +Length : 4 +Value : good +``` + +So for my Select-ColorString, its logic is: + +1. Split the input content in lines. 
+ + ```powershell + foreach ($line in $Content) {...} + ``` + +1. Find all the matches in a given line. + + ```powershell + $paramSelectString = @{ + Pattern = $Pattern + AllMatches = $true + CaseSensitive = $CaseSensitive + } + $matchList = $line | Select-String @paramSelectString + ``` + +1. Write `without color` for the string before the match. + + ```powershell + $index = 0 + foreach ($myMatch in $matchList.Matches) { + $length = $myMatch.Index - $index + Write-Host $line.Substring($index, $length) -NoNewline + ... + } + ``` + +1. Right after, write the match `with color`. + + ```powershell + foreach ($myMatch in $matchList.Matches) { + ... + $paramWriteHost = @{ + Object = $line.Substring($myMatch.Index, $myMatch.Length) + NoNewline = $true + ForegroundColor = $ForegroundColor + BackgroundColor = $BackgroundColor + } + Write-Host @paramWriteHost + ... + } + ``` + +1. Recalculate the index for the next match in the same line. + + ```powershell + $index = 0 + foreach ($myMatch in $matchList.Matches) { + ... + $index = $myMatch.Index + $myMatch.Length + } + ``` + +1. When there's no more matches in the same line, just write `without color` all the rest. + + ```powershell + $index = 0 + foreach ($myMatch in $matchList.Matches) { + ... + $index = $myMatch.Index + $myMatch.Length + } + Write-Host $line.Substring($index) + ``` + +That's all, let's see a demo on Select-ColorString. + +## Select-ColorString demo + +The demo reads in real-time a test file and use Select-ColorString to highlight the keyword `warn` + +![](../../assets/blog_images/2018-05-26-grep-like-powershell-colorful-select-string/Select-ColorString-demo.gif) + +## Select-String & -Split + +In fact Powershell -split operator can also [take regex pattern](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_split?view=powershell-6#options), and is as powerful as Select-String can do in terms of searching pattern. 
The reason that I chose Select-String instead of -split is because : + +1. Select-String makes sense to 'port' Unix grep on Powershell, they're both for searching patterns and displaying them. + +1. -split just splits the line by pattern, you still need to iterate on each split token and perform a -like or -match operation, which might take more time to display than Select-String does, as the latter stores the matches already, it just needs to move the index and display the matches in color. But to be honest, I've never tested the execution duration difference between -split and Select-String, maybe -split is faster. + +When I have time, I will write a new function based on -split with regex to test its power. + +## Trace-Word & Select-ColorString + +Both of them are in my toolkit, and I use them in different scenarios. + +- When I only need to search patterns based on words, I will `use Trace-Word`, as it can display different colors on different words. A typical use case is monitoring the log files which have some keywords like info, warning, error, etc. The output is much more beautiful. +- When I need to search patterns which include white space for example, I will `use Select-ColorString` as it takes regex and it doesn't split the line by white space in advance. + +BTW, I also set an alias on each of them: + +```powershell +PS> Set-Alias tw Trace-Word +PS> Set-Alias scs Select-ColorString +``` + +## Update 2018-11-19 on new switch -MultiColorsForSimplePattern + +I added a new switch [-MultiColorsForSimplePattern](https://github.com/copdips/PSScripts/commit/76361019f11602d607e7d95199a6f34e0a666c39) last week. This switch enables the Select-ColorString to display the different keywords in different colors just like Trace-Word. This is very useful at least for me to search some keywords like *error*, *warning* in the log files.
+ +![](../../assets/blog_images/2018-05-26-grep-like-powershell-colorful-select-string/new-switch-MultiColorsForSimplePattern.PNG) + +There's a limitation on this new switch that the multicolors only work for a simple pattern which contains only keywords separated by "\|" as shown in the above screenshot. And it cannot be used with regex, this is because by using regex, the color selection will take much more time than the simple keywords. Maybe in the future I will add a new switch **-MultiColorsForRegexPatternWithFastCpu**. + +## Select-ColorString source code + +Finally, you can find the source code of [Select-ColorString on Github](https://github.com/copdips/PSScripts/blob/master/Text/Select-ColorString.ps1). + +> As I'm forced to use only a few of the original Select-String's parameters, Select-ColorString cannot do everything that Select-String does, that's why I said *more or less* at the beginning of this post. +> +> Some better ways that I think could achieve the goal are either to use [ValueFromRemainingArguments](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_functions_advanced_parameters?view=powershell-6#valuefromremainingarguments-argument) to send all the remaining non-handled Select-ColorString parameters to Select-String, or to let the Microsoft Powershell team modify directly the [Types.ps1xml](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_types.ps1xml?view=powershell-6) + +{% highlight powershell linenos %} +function Select-ColorString { + <# + .SYNOPSIS + + Find the matches in a given content by the pattern and write the matches in color like grep. + + .NOTES + + inspired by: https://ridicurious.com/2018/03/14/highlight-words-in-powershell-console/ + + .EXAMPLE + + > 'aa bb cc', 'A line' | Select-ColorString a + + Both line 'aa bb cc' and line 'A line' are displayed as both contain "a" case insensitive.
+ + .EXAMPLE + + > 'aa bb cc', 'A line' | Select-ColorString a -NotMatch + + Nothing will be displayed as both lines have "a". + + .EXAMPLE + + > 'aa bb cc', 'A line' | Select-ColorString a -CaseSensitive + + Only line 'aa bb cc' is displayed with color on all occurrences of "a" case sensitive. + + .EXAMPLE + + > 'aa bb cc', 'A line' | Select-ColorString '(a)|(\sb)' -CaseSensitive -BackgroundColor White + + Only line 'aa bb cc' is displayed with background color White on all occurrences of regex '(a)|(\sb)' case sensitive. + + .EXAMPLE + + > 'aa bb cc', 'A line' | Select-ColorString b -KeepNotMatch + + Both line 'aa bb cc' and 'A line' are displayed with color on all occurrences of "b" case insensitive, + and for lines without the keyword "b", they will be only displayed but without color. + + .EXAMPLE + + > Get-Content app.log -Wait -Tail 100 | Select-ColorString "error|warning|critical" -MultiColorsForSimplePattern -KeepNotMatch + + Search the 3 key words "error", "warning", and "critical" in the last 100 lines of the active file app.log and display the 3 key words in 3 colors. + For lines without the key words, they will be only displayed but without color. + + .EXAMPLE + + > Get-Content "C:\Windows\Logs\DISM\dism.log" -Tail 100 -Wait | Select-ColorString win + + Find and color the keyword "win" in the last ongoing 100 lines of dism.log. + + .EXAMPLE + + > Get-WinEvent -FilterHashtable @{logname='System'; StartTime = (Get-Date).AddDays(-1)} | Select-Object time*,level*,message | Select-ColorString win + + Find and color the keyword "win" in the System event log from the last 24 hours.
+ #> + + [Cmdletbinding(DefaultParametersetName = 'Match')] + param( + [Parameter( + Position = 0)] + [ValidateNotNullOrEmpty()] + [String]$Pattern = $(throw "$($MyInvocation.MyCommand.Name) : " ` + + "Cannot bind null or empty value to the parameter `"Pattern`""), + + [Parameter( + ValueFromPipeline = $true, + HelpMessage = "String or list of string to be checked against the pattern")] + [String[]]$Content, + + [Parameter()] + [ValidateSet( + 'Black', + 'DarkBlue', + 'DarkGreen', + 'DarkCyan', + 'DarkRed', + 'DarkMagenta', + 'DarkYellow', + 'Gray', + 'DarkGray', + 'Blue', + 'Green', + 'Cyan', + 'Red', + 'Magenta', + 'Yellow', + 'White')] + [String]$ForegroundColor = 'Black', + + [Parameter()] + [ValidateSet( + 'Black', + 'DarkBlue', + 'DarkGreen', + 'DarkCyan', + 'DarkRed', + 'DarkMagenta', + 'DarkYellow', + 'Gray', + 'DarkGray', + 'Blue', + 'Green', + 'Cyan', + 'Red', + 'Magenta', + 'Yellow', + 'White')] + [ValidateScript( { + if ($Host.ui.RawUI.BackgroundColor -eq $_) { + throw "Current host background color is also set to `"$_`", " ` + + "please choose another color for a better readability" + } + else { + return $true + } + })] + [String]$BackgroundColor = 'Yellow', + + [Parameter()] + [Switch]$CaseSensitive, + + [Parameter( + HelpMessage = "Available only if the pattern is simple non-regex string " ` + + "separated by '|', use this switch with fast CPU.")] + [Switch]$MultiColorsForSimplePattern, + + [Parameter( + ParameterSetName = 'NotMatch', + HelpMessage = "If true, write only not matching lines; " ` + + "if false, write only matching lines")] + [Switch]$NotMatch, + + [Parameter( + ParameterSetName = 'Match', + HelpMessage = "If true, write all the lines; " ` + + "if false, write only matching lines")] + [Switch]$KeepNotMatch + ) + + begin { + $paramSelectString = @{ + Pattern = $Pattern + AllMatches = $true + CaseSensitive = $CaseSensitive + } + $writeNotMatch = $KeepNotMatch -or $NotMatch + + [System.Collections.ArrayList]$colorList = 
[System.Enum]::GetValues([System.ConsoleColor]) + $currentBackgroundColor = $Host.ui.RawUI.BackgroundColor + $colorList.Remove($currentBackgroundColor.ToString()) + $colorList.Remove($ForegroundColor) + $colorList.Reverse() + $colorCount = $colorList.Count + + if ($MultiColorsForSimplePattern) { + # Get all the console foreground and background colors mapping display effet: + # https://gist.github.com/timabell/cc9ca76964b59b2a54e91bda3665499e + $patternToColorMapping = [Ordered]@{} + # Available only if the pattern is a simple non-regex string separated by '|', use this with fast CPU. + # We dont support regex as -Pattern for this switch as it will need much more CPU. + # This switch is useful when you need to search some words, + # for example searching "error|warn|crtical" these 3 words in a log file. + $expectedMatches = $Pattern.split("|") + $expectedMatchesCount = $expectedMatches.Count + if ($expectedMatchesCount -ge $colorCount) { + Write-Host "The switch -MultiColorsForSimplePattern is True, " ` + + "but there're more patterns than the available colors number " ` + + "which is $colorCount, so rotation color list will be used." 
` + -ForegroundColor Yellow + } + 0..($expectedMatchesCount -1) | % { + $patternToColorMapping.($expectedMatches[$_]) = $colorList[$_ % $colorCount] + } + + } + } + + process { + foreach ($line in $Content) { + $matchList = $line | Select-String @paramSelectString + + if (0 -lt $matchList.Count) { + if (-not $NotMatch) { + $index = 0 + foreach ($myMatch in $matchList.Matches) { + $length = $myMatch.Index - $index + Write-Host $line.Substring($index, $length) -NoNewline + + $expectedBackgroupColor = $BackgroundColor + if ($MultiColorsForSimplePattern) { + $expectedBackgroupColor = $patternToColorMapping[$myMatch.Value] + } + + $paramWriteHost = @{ + Object = $line.Substring($myMatch.Index, $myMatch.Length) + NoNewline = $true + ForegroundColor = $ForegroundColor + BackgroundColor = $expectedBackgroupColor + } + Write-Host @paramWriteHost + + $index = $myMatch.Index + $myMatch.Length + } + Write-Host $line.Substring($index) + } + } + else { + if ($writeNotMatch) { + Write-Host "$line" + } + } + } + } + + end { + } +} +{% endhighlight %} diff --git a/docs/posts/2018/2018-06-03-converting-python-json-list-to-csv-in-2-lines-of-code-by-pandas.md b/docs/posts/2018/2018-06-03-converting-python-json-list-to-csv-in-2-lines-of-code-by-pandas.md new file mode 100644 index 00000000..e06457b2 --- /dev/null +++ b/docs/posts/2018/2018-06-03-converting-python-json-list-to-csv-in-2-lines-of-code-by-pandas.md @@ -0,0 +1,50 @@ +--- +authors: +- copdips +categories: +- python +- file +comments: true +date: + created: 2018-06-03 +description: One of the fastest way to convert Python json dict list to csv file with + only 2 lines of code by pandas +--- + +# Converting Python json dict list to csv file in 2 lines of code by pandas + +Converting a Powershell object list to a csv file is quiet easy, for example : +```powershell +6.0.2> gps | select name,id,path | ConvertTo-Csv | Out-File .\gps.csv ; ii .\gps.csv +```` +I'll show you in this post the Python way to convert a dict list to a 
csv file. + + + +During my work, I got a result in Python dict list type, I needed to send it to other teams who are not some Python guys. One of the most commonly used sharing file type is the [csv file](https://fr.wikipedia.org/wiki/Comma-separated_values). When I googled how to convert json to csv in Python, I found many ways to do that, but most of them need quiet a lot of code to accomplish this common task. I was a sysadmin, I don't like to write many lines for a single task, and I also don't like to reinvent the wheel. Finally, I found the [Python pandas module](https://pandas.pydata.org/) which lets me to achieve this goal in only 2 lines of code. + +pandas is an open source, BSD-licensed library providing high-performance, easy-to-use data structures and data analysis tools for the Python programming language. + + + +Here's the code : + +```python +>>> import json + +# first line of code: import the pandas module +>>> import pandas + +# generate a python dict list +>>> data= [{'name':'a', 'value':1}, {'name':'b', 'value':2}] + +# second line of code: convert the dict list to csv and save it into the file pandas.csv +>>> pandas.read_json(json.dumps(data)).to_csv('pandas.csv') + +# verify the csv file content +>>> with open('pandas.csv') as f: +... print(f.read()) +,name,value +0,a,1 +1,b,2 +``` diff --git a/docs/posts/2018/2018-06-21-import-python-module-with-sys-path-when-without-init-file.md b/docs/posts/2018/2018-06-21-import-python-module-with-sys-path-when-without-init-file.md new file mode 100644 index 00000000..9d57bc18 --- /dev/null +++ b/docs/posts/2018/2018-06-21-import-python-module-with-sys-path-when-without-init-file.md @@ -0,0 +1,111 @@ +--- +authors: +- copdips +categories: +- python +comments: true +date: + created: 2018-06-21 +description: When file A needs to import a function from the file B in another folder, + and B is not in a module, we can use the sys.path variable. 
+--- + +# Import Python module with sys.path variable when without `__init__` file + +We're familiar to put a python file inside a folder, and create a `__init__.py` file under the same folder, then we can easily import the file by import the folder, as the folder is transformed to a python module. But if we don't have the \_\_init\_\_.py, how can we import it? + + + +Suppose that we have a Flask project, during the development of Flask, we need to use the function **flask_ctx_get_request_id()** in the file **request_id.py** from the repo [https://github.com/Workable/flask-log-request-id](https://github.com/Workable/flask-log-request-id). + +Here is the current folder tree, there's only one file flask.py: + +``` +D:\xiang\git\test\flask_project +│ flask.py +``` + +I add the repo as a submodule: + +```powershell +> git submodule add https://github.com/Workable/flask-log-request-id.git +``` + +Then my folder tree is like this: + +``` +D:\xiang\git\test\flask_project +│ .gitmodules +│ flask.py +│ +└─flask-log-request-id + │ + ├─flask_log_request_id + │ │ ctx_fetcher.py + │ │ filters.py + │ │ parser.py + │ │ request_id.py + │ │ __init__.py + │ │ + │ └─extras + │ celery.py + │ __init__.py + │ + └─(more files and folders and ignored ...) +``` + +In flask.py, I try to import the function by importing the folder flask-log-request-id: +```python +# flask.py +from flask-log-request-id.flask_log_request_id.request_id import flask_ctx_get_request_id +``` + +Test the import: +```python +> python .\flask.py + File ".\flask.py", line 1 + from flask-log-request-id.flask_log_request_id.request_id import flask_ctx_get_request_id + ^ +SyntaxError: invalid syntax +``` + +The `flask-log-request-id` folder is not importable because it doesn't contain the __init_.py file. I don't want to manually create it, it has no sense here. The workaround is to use the [sys.path](https://docs.python.org/3/tutorial/modules.html#the-module-search-path) variable. + +6.1.2. 
As per the official doc, I could add the path of the `flask-log-request-id` folder to the sys.path variable, then the module `flask_log_request_id` will be directly searchable by the Python process.
Another method is to use the switch [`--ignore-submodules=dirty`](https://git-scm.com/docs/git-status#git-status---ignore-submodulesltwhengt) of `git status` (available from git version 1.7.2) and create an alias to shorten the typing.
> git config --global alias.gst='status --ignore-submodules=dirty'
If you cannot use Find-Package to search packages in the Nuget repository, please check my post on [Setting Up Nuget for Powershell](https://copdips.com/2018/05/setting-up-powershell-gallery-and-nuget-gallery-for-powershell.html#set-up-nuget-for-powershell).
+PythonLibs4CSharp 1.0.0 Nuget A collection of Iron Python compiled libraries with... +pythonx86 3.6.5 Nuget Installs 32-bit Python for use in build scenarios. +pythonnet_py35_dotnet 2.3.0 Nuget Python 3.5 and .NET Framework +pythonnet_py27_dotnet 2.3.0 Nuget Python 2.7 and .NET Framework +Python27 2.7.6 Nuget Python 2.7 API +PythonConsoleControl 1.0.1 Nuget PythonConsole +Python3 3.6.3.2 PSGallery Python3 interpreter +PythonSelect 1.0.0 PSGallery Select a Python distribution to use within a PowerS... +PythonConverter.dll 1.0.0 Nuget Package description +``` + +## Installing Python + +```powershell +# To install Python 3 +> Install-Package python -Scope CurrentUser + +# To install Python 2 +> Install-Package python2 -Scope CurrentUser +``` + +**Note 2018-08-29:** +!!! warning + + Current `Find-Package python* -AllVersion` gives the lastest python version is `v3.7.0`, but this version doesn't work, the last worked Nuget python version is `v3.6.6` + +## Adding Python to user path + +I will show you the way to add Python3 into the user PATH, it will be the same way for Python2. +I use the user PATH because I'm not admin on the Windows server, I cannot modify the system PATH. + +```powershell +# Get python3 package info path +> Get-Package python | % source +C:\Users\xiang\AppData\Local\ + +# For Nuget packages, the executable is always under the tools folder, and the tools folder is at the same level as .nupkg file. 
# python needs to add 2 paths to the user PATH, one is the root folder containing python.exe, another is the Scripts folder.
If you're in an enterprise environment, you probably don't have access to the public Python packages repository https://pypi.org/, and in this case, your enterprise should have a local Artifactory which mirrors the public https://pypi.org/. So you need to add your enterprise Artifactory PyPI URL to your Python pip conf.
+ +For **JFrog Artifactory**: +!!! note + + diff --git a/docs/posts/2018/2018-07-25-use-pyvmomi-EventHistoryCollector-to-get-all-the-vcenter-events.md b/docs/posts/2018/2018-07-25-use-pyvmomi-EventHistoryCollector-to-get-all-the-vcenter-events.md new file mode 100644 index 00000000..fd477994 --- /dev/null +++ b/docs/posts/2018/2018-07-25-use-pyvmomi-EventHistoryCollector-to-get-all-the-vcenter-events.md @@ -0,0 +1,138 @@ +--- +authors: +- copdips +categories: +- python +- vmware +comments: true +date: + created: 2018-07-25 +description: pyVmomi event manager returns only the last 1000 events. But EventHistoryCollector + object's ReadNextEvents()method can collect all the events. +--- + +# Use pyVmomi EventHistoryCollector to get all the vCenter events + +pyVmomi eventManager's QueryEvents() method returns by default only the last 1000 events occurred on the vCenter. I will show you how to use another method CreateCollectorForEvents() to create an EventHistoryCollector object and then we use this object to collect all the events in a given time range by using its method ReadNextEvents(). + + + +## An example of QueryEvents method from the eventManager object + +Let's see [an example](https://github.com/vmware/pyvmomi-community-samples/blob/master/samples/relocate_events.py#L66) given by the pyVmomi samples community. + +```python +# ...some code ignored... +byEntity = vim.event.EventFilterSpec.ByEntity(entity=vm, recursion="self") +ids = ['VmRelocatedEvent', 'DrsVmMigratedEvent', 'VmMigratedEvent'] +filterSpec = vim.event.EventFilterSpec(entity=byEntity, eventTypeId=ids) +# ...some code ignored... +eventManager = si.content.eventManager +events = eventManager.QueryEvent(filterSpec) +# ...some code ignored... +``` + +From the above code example, we can find that the author wants to collect the vCenter events where the event types are limited to `ids` and the event entity is limited to `byEntity`. 
The code works well, but you will find that, in any case, it will only return a maximum of 1000 events.
From its description, we can see that it reads all the events from the current page, then it jumps to the next page. EventHistoryCollector also has a `ReadPreviousEvents()` method that does exactly the same thing but jumps back to the previous page.
The last sentence tells us that the default starting page of the EventHistoryCollector is the oldest one, or we can call it the first page in a human-readable manner, so we can use `ReadNextEvents()` to read all the events page by page.
If you want to create some tables from a python list, you can use the `tabulate` module, it can generate the table easily in text mode and in many formats, then you can paste it into markdown, wiki files, or add the printed version to your python CLI in order to give a beautiful output to the CLI users.
Update 2019-04-23: When I tested the latest tabulate version 0.8.3, I found it also added support for the `github` format.
Confluance's web doc editor is very powerful, but I'm a markdown guy: I write everything in markdown in pure text mode and version it. I need something to convert markdown to Confluance.
And with that, you can easily filter (with Get-ScheduledTask -TaskPath) on only the tasks you're interested in, especially if some other tasks have a similar name to yours.
$taskPassword = 'password1' # $taskPassword is given by an un-secure clear string, it's only for demo. In addition, if you use a clear string, please be careful with single-quoted strings, because some passwords might contain the char $ which can be evaluated if you use a double-quoted string.
# when using Register-ScheduledTask, one of its params $taskPath = "\$taskFolderName"
# doesn't contain the ending "\", but for Get-ScheduledTask,
# we need to dig into many sub properties to get some basic scheduled task information.
# Here are some ways to achieve that.
Yes, you're right, the task log is saved directly to the standard windows event log. You can use Get-WinEvent (Get-EventLog is an old way) to get it. + +```powershell +# if you're not admin on the server, +# you might get some error when running below Get-WinEvent command, +# you can set $ErrorActionPreference = "SilentlyContinue" to hide it. +> Get-WinEvent -ListLog * | ? logname -match task + +LogMode MaximumSizeInBytes RecordCount LogName +------- ------------------ ----------- ------- +Circular 1052672 32 Microsoft-Windows-BackgroundTaskInfrastructure/Operational +Circular 1052672 0 Microsoft-Windows-Mobile-Broadband-Experience-Parser-Task/Operational +Circular 1052672 0 Microsoft-Windows-Shell-Core/LogonTasksChannel +Circular 1052672 636 Microsoft-Windows-TaskScheduler/Maintenance +Circular 10485760 Microsoft-Windows-TaskScheduler/Operational + +# No RecordCount for Microsoft-Windows-TaskScheduler/Operational +# because the log history is disabled by default, enable it by wevtutil. +> wevtutil set-log Microsoft-Windows-TaskScheduler/Operational /enabled:true +> wevtutil get-log Microsoft-Windows-TaskScheduler/Operational +> Get-WinEvent -ListLog * | ? logname -match task + +LogMode MaximumSizeInBytes RecordCount LogName +------- ------------------ ----------- ------- +Circular 1052672 32 Microsoft-Windows-BackgroundTaskInfrastructure/Operational +Circular 1052672get- 0 Microsoft-Windows-Mobile-Broadband-Experience-Parser-Task/Operational +Circular 1052672 0 Microsoft-Windows-Shell-Core/LogonTasksChannel +Circular 1052672 636 Microsoft-Windows-TaskScheduler/Maintenance +Circular 10485760 12 Microsoft-Windows-TaskScheduler/Operational + +# All the logs of all the tasks are saved in the same place, +# and the event object doesn't have a task name property, +# this is why when we view the task history from the taskschd.msc GUI, +# it's too slow to display, not cool /_\. +# So if we want to see the logs of a single task, there's still something to do. 
> Get-WinEvent -FilterHashtable @{logname="Microsoft-Windows-TaskScheduler/Operational"; StartTime=$(get-date).AddDays(-2)} | select -First 1 | fl *
+--- + +# Install Gitlab-CE in Docker on Ubuntu + +Gitlab-CE (Community Edition) is a completely free and powerful web-based Git-repository manager with wiki, issue-tracking and CI/CD pipeline features, using an open-source license, developed by GitLab Inc. There're already many much better docs on the net, I've never worked with Docker and Linux before, so I wrote this post to save my way to install the Gitlab docker version on Ubuntu, the post is more or less for personal purpose. + + + +## Install Ubuntu server on Hyper-V + +1. Enabled the free Hyper-V role on the Windows 10 PC. +2. Install Ubuntu server on the Hyper-V. (I used "Ubuntu 18.04.1 LTS") + +!!! warning + + Warning: **Don't install the snap version of Docker** during the Ubuntu install, I failed to run the Docker image after. There's an error saying that: *"docker: Error response from daemon: error while creating mount source path '/srv/gitlab/logs': mkdir /srv/gitlab: read-only file system."*. To remove the Docker snap: `sudo snap remove docker`. + +## Install Docker on Ubuntu + +Here is the [official doc](https://docs.docker.com/install/linux/docker-ce/ubuntu/) for installing Docker on Ubuntu, just follow the procedure step by step. + +The docker group is created but no users are added to it. You need to use sudo to run Docker commands. Continue to [Linux postinstall](https://docs.docker.com/install/linux/linux-postinstall/) to allow non-privileged users to run Docker commands and for other optional configuration steps. + +To verify Docker is running fine, we can try to run a hello-world image : +```bash +xiang@ubuntu1804:~$ docker run hello-world +Unable to find image 'hello-world:latest' locally +latest: Pulling from library/hello-world +9db2ca6ccae0: Pull complete +Digest: sha256:4b8ff392a12ed9ea17784bd3c9a8b1fa3299cac44aca35a85c90c5e3c7afacdc +Status: Downloaded newer image for hello-world:latest + +Hello from Docker! +This message shows that your installation appears to be working correctly. 
+ +To generate this message, Docker took the following steps: + 1. The Docker client contacted the Docker daemon. + 2. The Docker daemon pulled the "hello-world" image from the Docker Hub. + (amd64) + 3. The Docker daemon created a new container from that image which runs the + executable that produces the output you are currently reading. + 4. The Docker daemon streamed that output to the Docker client, which sent it + to your terminal. + +To try something more ambitious, you can run an Ubuntu container with: + $ docker run -it ubuntu bash + +Share images, automate workflows, and more with a free Docker ID: + https://hub.docker.com/ + +For more examples and ideas, visit: + https://docs.docker.com/engine/userguide/ +``` + +## Install Gitlab CE in Docker + +Here is the [official Gitlab Docker doc](https://docs.gitlab.com/omnibus/docker/), I really thank the Gitlab team, their doc system is one of the bests that I've ever seen. Another [doc from IBM](https://developer.ibm.com/code/2017/07/13/step-step-guide-running-gitlab-ce-docker/) is also good. Run the following commands to install Gitlab-CE in Docker. + +```bash +xiang@ubuntu1804:~$ docker run --detach \ +--hostname gitlab.copdips.local \ +--publish 443:443 --publish 80:80 --publish 2222:22 \ +--name gitlab \ +--restart always \ +--volume /srv/gitlab/config:/etc/gitlab \ +--volume /srv/gitlab/logs:/var/log/gitlab \ +--volume /srv/gitlab/data:/var/opt/gitlab \ +gitlab/gitlab-ce:latest + +xiang@ubuntu1804:~$ docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +707439b39dd1 gitlab/gitlab-ce:latest "/assets/wrapper" 3 minutes ago Up 3 minutes (health: starting) 0.0.0.0:80->80/tcp, 0.0.0.0:443->443/tcp, 0.0.0.0:2222->22/tcp gitlab +``` + +!!! 
warning + + Warning: I use `--publish 2222:22` instead of `--publish 22:22` which is given by the official [Run the docker image](https://docs.gitlab.com/omnibus/docker/#run-the-image) doc, this is to avoid using the default SSH port (TCP 22) already binded to the Docker host, our Ubuntu server. + +!!! warning + + Warning: `Do NOT use port 8080` otherwise there will be conflicts. This port is already used by Unicorn that runs internally in the container. + +!!! note + + Note: There's also a [Docker compose](https://docs.gitlab.com/omnibus/docker/#update-gitlab-using-docker-compose) way to install Gitlab-CE. + +## Check Gitlab + +Open your browser, go to http://YourUbuntuServerIP/, you should see the Gitlab login page. On this page, you need to set the Gitlab root user initial password. + +If you like to use HTTPS, you need to generate a SSL certificate and add it to Gitlab the config file. + +## Run Gitlab in Kubernetes + +IBM has provided a [doc](https://github.com/IBM/Kubernetes-container-service-GitLab-sample/blob/master/README.md) about it. diff --git a/docs/posts/2018/2018-09-16-setup-https-for-gitlab.md b/docs/posts/2018/2018-09-16-setup-https-for-gitlab.md new file mode 100644 index 00000000..c6c4cf1d --- /dev/null +++ b/docs/posts/2018/2018-09-16-setup-https-for-gitlab.md @@ -0,0 +1,272 @@ +--- +authors: +- copdips +categories: +- gitlab +- cicd +- certificate +- ubuntu +comments: true +date: + created: 2018-09-16 +description: Setup a SAN SSL certificate to use the HTTPS on Gitlab-CE in docker on + Ubuntu server. +--- + +# Setup HTTPS for Gitlab + +Gitlab-CE default installation goes with HTTPS disable. We need to generate a SSL certificate, and bind it to the HTTPS of Gitlab-CE. + + + +## Some docs on the Internet + +1. [Gitlab omnibus SSL settings](https://docs.gitlab.com/omnibus/settings/ssl.html) +2. [Gitlab omnibus enable HTTPS](https://docs.gitlab.com/omnibus/settings/nginx.html#enable-https) +3. 
[Generate a self-signed certificate with openssl](https://stackoverflow.com/questions/10175812/how-to-create-a-self-signed-certificate-with-openssl) +4. [How to install and configure Gitlab on Ubuntu 16.04](https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-gitlab-on-ubuntu-16-04) +5. [[Deprecated] How To Secure GitLab with Let's Encrypt on Ubuntu 16.04](https://www.digitalocean.com/community/tutorials/how-to-secure-gitlab-with-let-s-encrypt-on-ubuntu-16-04) + +## Generate self-signed SSL certificate without SAN + +## Online docs for SSL certificate without SAN + +1. [Creating a Self-Signed SSL Certificate](https://devcenter.heroku.com/articles/ssl-certificate-self) +2. [How To Run Gitlab With Self Signed Ssl Certificate](https://futurestud.io/tutorials/how-to-run-gitlab-with-self-signed-ssl-certificate) + +## Generate SSL certificate private key + +```bash +xiang@ubuntu1804:~/ssl$ sudo openssl genrsa -out "./gitlab.copdips.local.key" 2048 +Generating RSA private key, 2048 bit long modulus +............+++ +..+++ +e is 65537 (0x010001) +``` + +## Generate SSL certificate request + +Without the switch `-config`, the generation of csr request will ask you some information about company, email, and passphrasem etc. If you dont want OpenSSL to ask you that, you need to prepare a config file and specify it by `-config [YourConfigPath]`, and config example can be found in the paragraph [Prepare the OpenSSL config file](#prepare-the-openssl-config-file). + +```bash +xiang@ubuntu1804:~/ssl$ sudo openssl req -new -key "gitlab.copdips.local.key" -out "gitlab.copdips.local.csr" +You are about to be asked to enter information that will be incorporated +into your certificate request. +What you are about to enter is what is called a Distinguished Name or a DN. +There are quite a few fields but you can leave some blank +For some fields there will be a default value, +If you enter '.', the field will be left blank. 
+----- +Country Name (2 letter code) [AU]: +State or Province Name (full name) [Some-State]: +Locality Name (eg, city) []: +Organization Name (eg, company) [Internet Widgits Pty Ltd]:copdips +Organizational Unit Name (eg, section) []: +Common Name (e.g. server FQDN or YOUR name) []:gitlab.copdips.local +Email Address []: + +Please enter the following 'extra' attributes +to be sent with your certificate request +A challenge password []: +An optional company name []: +``` + +## Generate SSL certificate + +OpenSSL has the option to generate the certificate in one line, this post splits it into 3 steps (the private key, the request file, and the certificate) in order to get a clear understanding of the certificate generation procedure. + +```bash +xiang@ubuntu1804:~/ssl$ sudo openssl x509 -req -days 1000 -in gitlab.copdips.local.csr -signkey gitlab.copdips.local.key -out gitlab.copdips.local.crt -extfile gitlab.copdips.local.cnf -extension v3_req +Signature ok +subject=C = AU, ST = Some-State, O = copdips, CN = gitlab.copdips.local +Getting Private key +``` + +## Review the SSL certificate content + +```bash +xiang@ubuntu1804:~/ssl$ openssl x509 -in gitlab.copdips.local.crt -text -noout +Certificate: + Data: + Version: 1 (0x0) + Serial Number: + b4:96:ba:89:62:7b:32:83 + Signature Algorithm: sha256WithRSAEncryption + Issuer: C = AU, ST = Some-State, O = copdips, CN = gitlab.copdips.local + Validity + Not Before: Sep 13 22:05:40 2018 GMT + Not After : Jun 9 22:05:40 2021 GMT + Subject: C = AU, ST = Some-State, O = copdips, CN = gitlab.copdips.local + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) +``` + +**DO NOT** use password protected certificate key (in case the lack of the switch -nodes for *no DES*), to [remove the password from the key](https://docs.gitlab.com/omnibus/settings/nginx.html#manually-configuring-https): +!!! 
warning + + `openssl rsa -in certificate_before.key -out certificate_after.key` + +## Generate self-signed SAN SSL certificate + +## Online docs for SSL certificate with SAN + +I tested many methods found on the Internet, most of them don't work. Finally, I followed the [doc maintained by Citrix](https://support.citrix.com/article/CTX135602). This should be a trusted one as Netscaler is a key product in Citrix, the doc is always updated with the latest version of OpenSSL. +With time going by, the procedure might change, if below procedure doesn't work, please go to check the Citrix online doc directly. + +## Prepare the OpenSSL config file + +Prepare an OpenSSL config file. On Ubuntu 1804, an OpenSSL config example can be found at: `/usr/lib/ssl/openssl.cnf`. +Or You can find the path from the command: `openssl version -a | grep OPENSSLDIR`. You might need to change the config according to your actual environment. + +```bash +xiang@ubuntu1804:~/ssl$ cat gitlab.copdips.local.cnf +[req] +prompt = no +default_bits = 2048 +x509_extensions = v3_req +distinguished_name = req_distinguished_name + +[req_distinguished_name] +organizationName = copdips +commonName = gitlab.copdips.local + +[v3_req] +subjectAltName = @alt_names + +[alt_names] +DNS.1 = *.copdips.local +DNS.2 = ubuntu1804 +DNS.3 = ubuntu1804.copdips.local +``` + +!!! warning + + Be careful with the usage of the `wildcard` in [alt_names], the above OpenSSL config is just an example to show what are the DNS names can be added to SAN. + +## Generate the SAN SSL certificate content + +Pay attention to `-extensions v3_req` in the end of the command, it's the extension tag name in the `gitlab.copdips.local.cnf` file. If you dont specify it, the output certificate won't have the extension part, so no SAN neither. 
+ +```bash +xiang@ubuntu1804:~/ssl$ sudo openssl req -x509 -days 1000 -nodes -out gitlab.copdips.local.crt -keyout gitlab.copdips.local.key -config gitlab.copdips.local.cnf -extensions v3_req +Generating a 2048 bit RSA private key +...................................................+++ +...............................+++ +writing new private key to 'gitlab.copdips.local.key' +``` + +**DO NOT** use password protected certificate key (in case the lack of the switch -nodes for *no DES*), to [remove the password from the key](https://docs.gitlab.com/omnibus/settings/nginx.html#manually-configuring-https): +!!! warning + + `openssl rsa -in certificate_before.key -out certificate_after.key` + +## Review the SAN SSL certificate + +The `default Signature Algorithm` has been already `SHA256`. Some online docs tell to add the switch -sha256 when using openssl req, but it's deprecated with the new version of OpenSSL. BTW, the `RSA private key default bits` is `2048`. My OpenSSL version on Ubuntu 1804 is `OpenSSL 1.1.0g 2 Nov 2017` + +```bash +xiang@ubuntu1804:~/ssl$ openssl x509 -in gitlab.copdips.local.crt -noout -text +Certificate: + Data: + Version: 3 (0x2) + Serial Number: + d3:2c:bb:1d:6c:7e:7b:98 + Signature Algorithm: sha256WithRSAEncryption + Issuer: O = copdips, CN = gitlab.copdips.local + Validity + Not Before: Sep 15 22:00:55 2018 GMT + Not After : Jun 11 22:00:55 2021 GMT + Subject: O = copdips, CN = gitlab.copdips.local + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + [...] + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Subject Alternative Name: + DNS:*.copdips.local, DNS:ubuntu1804, DNS:ubuntu1804.copdips.local + Signature Algorithm: sha256WithRSAEncryption + [...] +``` + +## Save the SSL certificate + +Create the folder `/etc/gitlab/ssl` with following two commands, and copy the SSL certificate and key here with the name of `[fqdn].crt` and `[fqnd].key`. 
+ +```bash +root@gitlab:/# mkdir -p /etc/gitlab/ssl +root@gitlab:/# chmod 700 /etc/gitlab/ssl +xiang@ubuntu1804:~/ssl$ sudo cp ~/ssl/gitlab.copdips.local.key ~/ssl/gitlab.copdips.local.crt /srv/gitlab1083/config/ssl/ +``` + +!!! note + + `/srv/gitlab1083/ssl/` is the physical gitlab location on my Ubuntu server which is pointed to `/etc/gitlab/ssl` its docker container. + +## Configure HTTPS on Gitlab + +Hereunder the content of uncommented lines in the Gitlab configuration file: + +```bash +root@gitlab:/# grep "^[^#;]" /etc/gitlab/gitlab.rb + external_url 'https://gitlab.copdips.local' + nginx['redirect_http_to_https'] = true + nginx['ssl_certificate'] = "/etc/gitlab/ssl/gitlab.copdips.local.crt" + nginx['ssl_certificate_key'] = "/etc/gitlab/ssl/gitlab.copdips.local.key" +``` + +## Update Gitlab config + +When you changed the configuration file, to take effect: + +```bash +root@gitlab:/# gitlab-ctl reconfigure +``` + +## Check the website SSL certificate from the command line + +## By openssl for both Linux and Windows + +For Linux : +```bash +openssl s_client -connect gitlab.copdips.local:443 < /dev/null 2>/dev/null | openssl x509 -text -in /dev/stdin -noout +``` + +For Windows with OpenSSL installed: +```powershell +$null | openssl s_client -connect gitlab.copdips.local:443 | openssl x509 -text -noout +``` + +!!! note + + My OpenSSL is installed with GIT on Windows. [GitForWindows](https://gitforwindows.org/) installs also many other powerful Linux commands (grep, ssh, tail, and also vim, etc.) ported to Windows. + +## By certuil for Windows only + +You should explicitly download the certificate at first, and then view the content locally, so this method is not cool. +Hope Powershell team can get this done by one single cmdlet in the future Powershell releases. 
+ +```powershell +$url = "https://gitlab.copdips.local" +$localCertPath = "$env:temp\$($url.Split('/')[2]).crt" +$webRequest = [Net.WebRequest]::Create($url) +try { $webRequest.GetResponse() } catch {} # try catch is useful if ssl cert is not valid. ServicePoint is always kept even for invalid ssl cert. +$cert = $webRequest.ServicePoint.Certificate +$bytes = $cert.Export("Cert") +Set-content -value $bytes -encoding byte -path $localCertPath +certutil.exe -dump $localCertPath +``` + +Or a nice cmdlet [`Test-WebServerSSL`](https://www.sysadmins.lv/blog-en/test-remote-web-server-ssl-certificate.aspx) written by the MVP Vadims Podāns. + +## Update the certificate in case of renewal + +Here is the [official doc](https://docs.gitlab.com/omnibus/settings/nginx.html#update-the-ssl-certificates). + +When you changed the SSL certificate, `gitlab-ctl reconfigure` won't take it into effect as there's nothing changed in the gitlab.rb configuration file. Use following command to update the certificate: + +```bash +gitlab-ctl hup nginx +``` diff --git a/docs/posts/2018/2018-09-20-install-gitlab-runner-on-windows-by-powershell-psremoting.md b/docs/posts/2018/2018-09-20-install-gitlab-runner-on-windows-by-powershell-psremoting.md new file mode 100644 index 00000000..b900dcf9 --- /dev/null +++ b/docs/posts/2018/2018-09-20-install-gitlab-runner-on-windows-by-powershell-psremoting.md @@ -0,0 +1,145 @@ +--- +authors: +- copdips +categories: +- gitlab +- cicd +- powershell +comments: true +date: + created: 2018-09-20 +description: Install Gitlab runner on Windows over winrm PsRemoting with full command + line. +--- + +# Install Gitlab Runner on Windows by Powershell PsRemoting + +Gitlab runner can be installed on Windows OS. For people like me who is more familiar with Windows, we would like to use Windows as a Gitlab runner. 
This post will give you a simplified procedure (winrm PsRemoting full command line) about its installation with some tips and tricks that I haven't seen anywhere on the Internet. + + + +## Some docs on the Internet + +The [official doc](https://docs.gitlab.com/runner/) is complete and clear enough. + +## Download Gitlab runner executable + +```powershell +# This command is runned from my Windows 10 desktop. +$paramIwr = @{ + Uri = "https://gitlab-runner-downloads.s3.amazonaws.com/latest/binaries/gitlab-runner-windows-amd64.exe"; + OutFile = "D:\xiang\Downloads\gitlab-runner-windows-amd64.exe" +} +Invoke-WebRequest @paramIwr +``` + +## Install Gitlab runner on Windows + +Some official docs: +1. [Install gitlab runner on windows](https://docs.gitlab.com/runner/install/windows.html). +2. [Gitlab-runner installation related commands](https://docs.gitlab.com/runner/commands/#gitlab-runner-install) + +My Gitlab runner is a fresh Windows server 2019 VM named **19S01**. + +```powershell +# Use WinRM over HTTPS is the simplest way to connect to an out of the box workgroup Windows server in lab. +$s19s01 = New-PSSession 19S01 -UseSSL -SessionOption (New-PSSessionOption -SkipCACheck) -Credential administrator + +# ntrights is in the Windows Server 2003 Resource Kit Tools +# https://www.microsoft.com/en-us/Download/confirmation.aspx?id=17657 +Copy-Item D:\xiang\Dropbox\tools\windows\rktools\ntrights.exe c:/temp -ToSession $s19s01 +Copy-Item D:\xiang\Downloads\gitlab-runner-windows-amd64.exe c:/temp -ToSession $s19s01 + +Enter-PSSession $s19S01 + +# If you need to use a domain account to run the gitlab-runner server, this way is not recommended. 
+# c:/temp/ntrights.exe ntrights +r SeServiceLogonRight -u Domain\DomainAccount + +New-Item d:/app/gitlab-runner -Type Directory -Force +Copy-Item C:\temp\gitlab-runner-windows-amd64.exe D:\app\gitlab-runner +Rename-Item D:\app\gitlab-runner\gitlab-runner-windows-amd64.exe gitlab-runner.exe + +# Install without any other params will install a windows service named gitlab-runner running under the built-in system account. +Set-Location D:\app\gitlab-runner +./gitlab-runner.exe install + +# If you need to bind a domain account to the gitlab runner service: +# I encountered some issue when installing gitlab runner service with the full exe path : D:\app\gitlab-runner\gitlab-runner.exe install, so I firstly go to the gitlab-runner.exe folder, than run the exe directly from there. +Set-Location D:\app\gitlab-runner +./gitlab-runner install --user ENTER-YOUR-USERNAME --password ENTER-YOUR-PASSWORD + +D:\app\gitlab-runner\gitlab-runner.exe status +``` + +## Register Gitlab runner on Windows + +Some official docs: + +1. [Register gitlab-runner on windows](https://docs.gitlab.com/runner/register/index.html#windows) +2. [One-line registration commands](https://docs.gitlab.com/runner/register/index.html#one-line-registration-command) +3. [Gitlab-runner registration related commands](https://docs.gitlab.com/runner/commands/#registration-related-commands) + +```powershell +Add-Content -Value "192.168.111.184`tgitlab.copdips.local" -Path C:\Windows\system32\drivers\etc\hosts + +# Add the gitlab self-signed certificate to runner's cert store. +$gitlabUrl = "https://gitlab.copdips.local" +$localCertPath = "$env:temp\$($gitlabUrl.Split('/')[2]).crt" +$webRequest = [Net.WebRequest]::Create($gitlabUrl) +try { $webRequest.GetResponse() } catch {} # try catch is useful if ssl cert is not valid. ServicePoint is always kept even for invalid ssl cert. 
+$cert = $webRequest.ServicePoint.Certificate +$bytes = $cert.Export("Cert") +Set-content -value $bytes -encoding byte -path $localCertPath + +# https://docs.microsoft.com/en-us/windows/desktop/seccertenroll/about-certificate-directory +Import-Certificate -FilePath $localCertPath -CertStoreLocation Cert:\LocalMachine\Root + +# Ensure the runner is stopped before the registration. +D:\app\gitlab-runner\gitlab-runner.exe stop +D:\app\gitlab-runner\gitlab-runner.exe status + +# Go to https://gitlab.copdips.local/win/flaskapi/settings/ci_cd and get the runner registration-token from this web site +# Dont add quotes around the registration-token. +# Pay attention to the usage of the stop-parsing symbol --% . +# http://copdips.com/2018/05/powershell-stop-parsing.html +D:\app\gitlab-runner\gitlab-runner.exe --% register -n --name 19s01 --url https://gitlab.copdips.local/ --registration-token Qdz3TyfnESrjSsmff6A9 --executor shell --shell powershell --tag-list 'windows,windows2016,flaskapi' --run-untagged true +D:\app\gitlab-runner\gitlab-runner.exe start +D:\app\gitlab-runner\gitlab-runner.exe status +``` + +!!! note + + Using Powershell Core `pwsh.exe` as a Windows Gitlab runner shell will be supported from the [version 11.8](https://gitlab.com/gitlab-org/gitlab-runner/issues/3291#note_111326306) + +## Check the Gitlab runner config from the runner server + +```powershell +# Dont be afraid of the error messages returned by gitlab-runner.exe list. +# The Powershell PsRemoting session is not as powerfull as local Powershell console, and some external executables like gitlab-runner.exe or git.exe send their outputs to stderr by default. 
+[19S01]: PS C:\temp> D:\app\gitlab-runner\gitlab-runner.exe list +D:\app\gitlab-runner\gitlab-runner.exe : Listing configured runners +ConfigFile=C:\Users\Administrator\Documents\config.toml + + CategoryInfo : NotSpecified: (Listing configu...nts\config.toml:String) [], RemoteException + + FullyQualifiedErrorId : NativeCommandError + +19s01 Executor=shell +Token=4a76cba042b1748e7546dad9f03458 URL=https://gitlab.copdips.local/ + +[19S01]: PS C:\temp> Get-Content (gcim cim_service | ? name -eq gitlab-runner | % path*).split(" ")[5] +concurrent = 1 +check_interval = 0 + +[[runners]] + name = "19s01" + url = "https://gitlab.copdips.local/" + token = "4a76cba042b1748e7546dad9f03458" + executor = "shell" + shell = "powershell" + [runners.cache] +``` + +## Check the Gitlab runner config from the Gitlab website + +Go to the Gitlab web site hosted in my Ubuntu docker container. Then go to the repo where I got the runner registration token previously. Than go to `Settings-> CI / CD Settings -> Runner Settings`, check your runner setting here, especially the tag list which is not listed from the [runner server local config](#check-the-gitlab-runner-config-from-the-runner-server). + +![](../../assets/blog_images/2018-09-20-install-gitlab-runner-on-windows-by-powershell-psremoting/gitlab-runner-settings-from-web.PNG) diff --git a/docs/posts/2018/2018-09-24-backup-and-restore-gitlab-in-docker.md b/docs/posts/2018/2018-09-24-backup-and-restore-gitlab-in-docker.md new file mode 100644 index 00000000..50ba4416 --- /dev/null +++ b/docs/posts/2018/2018-09-24-backup-and-restore-gitlab-in-docker.md @@ -0,0 +1,380 @@ +--- +authors: +- copdips +categories: +- gitlab +- cicd +- docker +- backup +- ubuntu +comments: true +date: + created: 2018-09-24 +description: Step by step procedure to backup and restore Gitlab in docker. +--- + +# Backup and restore Gitlab in docker + +Gitlab hosts everything about the code including the docs and the pipeline data, etc. It's crucial to back it up. 
You can also use restore to migrate the Gitlab to another server. This post will show you how to backup and restore the Gitlab-CE docker version. + + + +## Some docs on the Internet + +1. [Backing up and restoring Gitlab from docs.gitlab.com](https://docs.gitlab.com/ee/raketasks/backup_restore.html) +2. [Gitlab omnibus backup from docs.gitlab.com](https://docs.gitlab.com/omnibus/settings/backups.html) +3. [Gitlab Backup from codereviewvideos.com](https://codereviewvideos.com/course/your-own-private-github/video/gitlab-backup) +4. [GitLab Backup Made Easy from icicletech.com](https://www.icicletech.com/blog/gitlab-backup-made-easy) + +## Backup prerequisites + +## Tar version +The [official doc](https://docs.gitlab.com/ee/raketasks/backup_restore.html#requirements) says that the backup and restore tasks use tar with minimum `version 1.3`. Check the tar version by `tar --version`. The default tar version installed by Gitlab with docker (Gitlab-CE v10.8.3) is v1.28, after the test, the backup and restore both work well with tar in version v1.28. After the test, I find that the default tar v1.28 is also good. + +## VM snapshot + +If your Gitlab is installed on a VM, you can create a snapshot before any action. Please note that **snapshot is not a backup**, you should delete it as soon as your backup or restore task is completed. + +## Gitlab version + +Be aware that we can only restore to exactly the same version and type of Gitlab. The default backup file has the Gitlab version and type in the end of the file name which is in the format `EPOCH_YYYY_MM_DD_GitLab_version`. + +> : +> +> The backup archive will be saved in `backup_path`, which is specified in the `config/gitlab.yml` file. The filename will be `[TIMESTAMP]_gitlab_backup.tar`, where `TIMESTAMP` identifies the time at which each backup was created, plus the GitLab version. The timestamp is needed if you need to restore GitLab and multiple backups are available. 
+> +> For example, if the backup name is `1493107454_2018_04_25_10.6.4-ce_gitlab_backup.tar`, then the timestamp is `1493107454_2018_04_25_10.6.4-ce`. + +!!! warning + + `config/gitlab.yml` is migrated to `/etc/gitlab/gitlab.rb` in newer Gitlab version + +## Backup Gitlab in docker + +## Locate backup path + +`gitlab_rails['backup_path']` is commented in the Gitlab configuration file `gitlab.rb`, its value is the default backup path which is at `/var/opt/gitlab/backups`. + +```bash +# From Gitlab docker + +root@gitlab:/etc/gitlab# cat /etc/gitlab/gitlab.rb | grep backup_path +# gitlab_rails['manage_backup_path'] = true +# gitlab_rails['backup_path'] = "/var/opt/gitlab/backups" +``` + +## Create the backup + +You don't need to stop anything before creating the backup. + +```bash +# From Ubuntu host outside of the Gitlab docker + +xiang@ubuntu1804:~$ docker exec -it gitlab1083 gitlab-rake gitlab:backup:create +Dumping database ... +Dumping PostgreSQL database gitlabhq_production ... [DONE] +done +Dumping repositories ... + * win/flaskapi ... [DONE] + * win/flaskapi.wiki ... [SKIPPED] + * xiang/flaskapi ... [DONE] + * xiang/flaskapi.wiki ... [SKIPPED] +done +Dumping uploads ... +done +Dumping builds ... +done +Dumping artifacts ... +done +Dumping pages ... +done +Dumping lfs objects ... +done +Dumping container registry images ... +[DISABLED] +Creating backup archive: 1537738648_2018_09_23_10.8.3_gitlab_backup.tar ... done +Uploading backup archive to remote storage ... skipped +Deleting tmp directories ... done +done +done +done +done +done +done +done +Deleting old backups ... skipping +xiang@ubuntu1804:~$ +``` + +!!! note + + The backup uses the Linux commands `tar` and `gzip`. This works fine in most cases, but can cause problems when data is rapidly changing. When data changes while tar is reading it, the error `file changed as we read it` may occur, and will cause the backup process to fail. 
In such case, you add the copy strategy to your backup command like `docker exec -it gitlab1083 gitlab-rake gitlab:backup:create STRATEGY=copy`. + +## Check the backup + +In fact, I created twice the backup, so we can see two backups here with different timestamps: `1537738648_2018_09_23_10.8.3`, `1537738690_2018_09_23_10.8.3`. + +Notice that the backup file names don't contain the Gitlab type (ce for community edition), they only have the creation time (1537738648_2018_09_23 for the first backup file) and the Gitlab version (10.8.3). + +We can also find that the backup account is `git`. + +```bash +# From Gitlab docker + +root@gitlab:/etc/gitlab# ls -lart /var/opt/gitlab/backups +total 644 +drwxr-xr-x 19 root root 4096 Sep 22 23:52 .. +-rw------- 1 git git 215040 Sep 23 21:37 1537738648_2018_09_23_10.8.3_gitlab_backup.tar +-rw------- 1 git git 215040 Sep 23 21:38 1537738690_2018_09_23_10.8.3_gitlab_backup.tar +drwx------ 2 git root 4096 Sep 23 21:38 . +``` + +## Backup configuration and secret files + +Yes, the configuration and secret files are not backed up during the [previous backup procedure](#create-the-backup). This is because the previous one [encrypts the some Gitlab data by using the secret key](https://docs.gitlab.com/ee/raketasks/backup_restore.html#storing-configuration-files) in the configuration and secret files. If you save them to the same place, you're just defeating the encryption. + +So please also backup `/etc/gitlab/gitlab.rb` and `/etc/gitlab/gitlab-secrets.json` and save them to a secure place from other Gitlab backup data. + +## Upload backups to remote storage + +I haven't tested yet, here is the [official doc](https://docs.gitlab.com/ee/raketasks/backup_restore.html#uploading-backups-to-a-remote-cloud-storage). + +## Restore Gitlab + +You can only restore the Gitlab backup to exactly the same Gitlab version and type. And you also need to have a working Gitlab instance. 
+ +## Stop some Gitlab services + +```bash +# From Gitlab docker + +gitlab-ctl reconfigure +gitlab-ctl start +gitlab-ctl stop unicorn +gitlab-ctl stop sidekiq +gitlab-ctl status +ls -lart /var/opt/gitlab/backups +``` + +## Start the restore + +The backup file must can be found in the [backup path](#locate-backup-path), which is defined in the configuration file `/etc/gitlab/gitlab.rb` by the key `gitlab_rails['backup_path']`. + +```bash +# From Ubuntu host outside of the Gitlab docker + +xiang@ubuntu1804:~$ docker exec -it gitlab1083 gitlab-rake gitlab:backup:restore --trace +** Invoke gitlab:backup:restore (first_time) +** Invoke gitlab_environment (first_time) +** Invoke environment (first_time) +** Execute environment +** Execute gitlab_environment +** Execute gitlab:backup:restore +Unpacking backup ... done +Before restoring the database, we will remove all existing +tables to avoid future upgrade problems. Be aware that if you have +custom tables in the GitLab database these tables and all data will be +removed. + +Do you want to continue (yes/no)? yes +Removing all tables. Press `Ctrl-C` within 5 seconds to abort +(...) +COPY 0 + setval +-------- + 1 +(1 row) + +COPY 0 + setval +-------- + 1 +(1 row) +(...) +ALTER TABLE +ALTER TABLE +(...) +CREATE INDEX +(...) +ALTER TABLE +ALTER TABLE +(...) +WARNING: no privileges were granted for "public" +GRANT +[DONE] +done +** Invoke gitlab:backup:repo:restore (first_time) +** Invoke gitlab_environment +** Execute gitlab:backup:repo:restore +Restoring repositories ... + * win/flaskapi ... [DONE] + * xiang/flaskapi ... [DONE] +Put GitLab hooks in repositories dirs [DONE] +done +** Invoke gitlab:backup:uploads:restore (first_time) +** Invoke gitlab_environment +** Execute gitlab:backup:uploads:restore +Restoring uploads ... +done +** Invoke gitlab:backup:builds:restore (first_time) +** Invoke gitlab_environment +** Execute gitlab:backup:builds:restore +Restoring builds ... 
+done +** Invoke gitlab:backup:artifacts:restore (first_time) +** Invoke gitlab_environment +** Execute gitlab:backup:artifacts:restore +Restoring artifacts ... +done +** Invoke gitlab:backup:pages:restore (first_time) +** Invoke gitlab_environment +** Execute gitlab:backup:pages:restore +Restoring pages ... +done +** Invoke gitlab:backup:lfs:restore (first_time) +** Invoke gitlab_environment +** Execute gitlab:backup:lfs:restore +Restoring lfs objects ... +done +** Invoke gitlab:shell:setup (first_time) +** Invoke gitlab_environment +** Execute gitlab:shell:setup +This will rebuild an authorized_keys file. +You will lose any data stored in authorized_keys file. +Do you want to continue (yes/no)? yes + +** Invoke cache:clear (first_time) +** Invoke cache:clear:redis (first_time) +** Invoke environment +** Execute cache:clear:redis +** Execute cache:clear +Deleting tmp directories ... done +done +done +done +done +done +done +done +xiang@ubuntu1804:~$ +``` + +!!! note + + We can also add the param BACKUP to specify the backup file if there're more than one backup tar file in the backup path. The value of the BACKUP is the [backup file timestamp](#gitlab-version), for example : `docker exec -it gitlab1083 gitlab-rake gitlab:backup:restore BACKUP=1537738690_2018_09_23_10.8.3 --trace`. 
+ +## Restart Gitlab with sanity check + +Restart the Gitlab services by `gitlab-ctl restart`: + +```bash +# From Gitlab docker + +root@gitlab:/# gitlab-ctl restart +ok: run: alertmanager: (pid 2789) 1s +ok: run: gitaly: (pid 2797) 0s +ok: run: gitlab-monitor: (pid 2806) 0s +ok: run: gitlab-workhorse: (pid 2811) 1s +ok: run: logrotate: (pid 2827) 0s +ok: run: nginx: (pid 2834) 1s +ok: run: node-exporter: (pid 2839) 0s +ok: run: postgres-exporter: (pid 2845) 1s +ok: run: postgresql: (pid 2855) 0s +ok: run: prometheus: (pid 2864) 0s +ok: run: redis: (pid 2873) 1s +ok: run: redis-exporter: (pid 2877) 0s +ok: run: sidekiq: (pid 2957) 0s +ok: run: sshd: (pid 2960) 0s +ok: run: unicorn: (pid 2968) 1s +``` + +Launch the Gitlab sanity check by `gitlab-rake gitlab:check SANITIZE=true`: + +```bash +root@gitlab:/# gitlab-rake gitlab:check SANITIZE=true +Checking GitLab Shell ... + +GitLab Shell version >= 7.1.2 ? ... OK (7.1.2) +Repo base directory exists? +default... yes +Repo storage directories are symlinks? +default... no +Repo paths owned by git:root, or git:git? +default... yes +Repo paths access is drwxrws---? +default... yes +hooks directories in repos are links: ... +3/2 ... ok +2/3 ... ok +Running /opt/gitlab/embedded/service/gitlab-shell/bin/check +Check GitLab API access: FAILED: Failed to connect to internal API +gitlab-shell self-check failed + Try fixing it: + Make sure GitLab is running; + Check the gitlab-shell configuration file: + sudo -u git -H editor /opt/gitlab/embedded/service/gitlab-shell/config.yml + Please fix the error above and rerun the checks. + +Checking GitLab Shell ... Finished + +Checking Sidekiq ... + +Running? ... no + Try fixing it: + sudo -u git -H RAILS_ENV=production bin/background_jobs start + For more information see: + doc/install/installation.md in section "Install Init Script" + see log/sidekiq.log for possible errors + Please fix the error above and rerun the checks. + +Checking Sidekiq ... 
Finished + +Reply by email is disabled in config/gitlab.yml +Checking LDAP ... + +LDAP is disabled in config/gitlab.yml + +Checking LDAP ... Finished + +Checking GitLab ... + +Git configured correctly? ... yes +Database config exists? ... yes +All migrations up? ... yesyes +Database contains orphaned GroupMembers? ... nono +GitLab config exists? ... yes +GitLab config up to date? ... yes +Log directory writable? ... yes +Tmp directory writable? ... yes +Uploads directory exists? ... yes +Uploads directory has correct permissions? ... yes +Uploads directory tmp has correct permissions? ... yes +Init script exists? ... skipped (omnibus-gitlab has no init script) +Init script up-to-date? ... skipped (omnibus-gitlab has no init script) +Projects have namespace: ... +3/2 ... yesyes +2/3 ... yes +Redis version >= 2.8.0? ... yes +Ruby version >= 2.3.5 ? ... yes +Ruby version >= 2.3.5 ? ... yes (2.3.7) +Git version >= 2.9.5 ? ... yes (2.16.4)yes (2.3.7) +Git version >= 2.9.5 ? ... yes (2.16.4) +Git user has default SSH configuration? ... yes +Active users: ... 2 + +Checking GitLab ... Finished + +root@gitlab:/# +``` + +Verify the Gitlab container health by `docker ps`: + +```bash +# From Ubuntu host outside of the Gitlab docker + +xiang@ubuntu1804:~$ docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS +PORTS NAMES +707439b39dd1 gitlab/gitlab-ce:10.8.3-ce.0 "/assets/wrapper" 2 weeks ago Up 15 minutes (healthy) +0.0.0.0:80->80/tcp, 0.0.0.0:443->443/tcp, 0.0.0.0:2222->22/tcp gitlab1083 +``` diff --git a/docs/posts/2018/2018-09-28-terminate-powershell-script-or-session.md b/docs/posts/2018/2018-09-28-terminate-powershell-script-or-session.md new file mode 100644 index 00000000..f3c489cb --- /dev/null +++ b/docs/posts/2018/2018-09-28-terminate-powershell-script-or-session.md @@ -0,0 +1,371 @@ +--- +authors: +- copdips +categories: +- powershell +comments: true +date: + created: 2018-09-28 +description: Some ways to terminate the Powershell script or session. 
+--- + +# Terminate Powershell script or session + +I always asked myself how to terminate a Powershell script or session, each time I needed to do some tests by myself and also searched on Google. But I could never remember it. So I would like to take this post to note it down, the next time I need to terminate, just need to come back to here. + + + +## Terminate the current Powershell script + +## Way 1 - Exit + +### Exit without exit code + +```powershell +5.1> Get-Content .\test.ps1 +function foo { + Write-Output beginFunction + 1..3 | % { + Write-Output $_ + exit + } + Write-Output endFunction +} + +Write-Output beginScript +foo +Write-Output endScript + +5.1> .\test.ps1 +beginScript +beginFunction +1 + +5.1> Write-Host "Last execution status: $?" ; Write-Host "Last exit code: $LASTEXITCODE" +Last execution status: True +Last exit code: 0 + +5.1> +``` + +### Exit with code 0 + +```powershell +5.1> Get-Content .\test.ps1 +function foo { + Write-Output beginFunction + 1..3 | % { + Write-Output $_ + exit 0 + } + Write-Output endFunction +} + +Write-Output beginScript +foo +Write-Output endScript + +5.1> .\test.ps1 +beginScript +beginFunction +1 + +5.1> Write-Host "Last execution status: $?" ; Write-Host "Last exit code: $LASTEXITCODE" +Last execution status: True +Last exit code: 0 + +5.1> +``` + +### Exit with code 1 + +```powershell +5.1> Get-Content .\test.ps1 +function foo { + Write-Output beginFunction + 1..3 | % { + Write-Output $_ + exit 1 + } + Write-Output endFunction +} + +Write-Output beginScript +foo +Write-Output endScript + +5.1> .\test.ps1 +beginScript +beginFunction +1 + +5.1> Write-Host "Last execution status: $?" 
; Write-Host "Last exit code: $LASTEXITCODE" +Last execution status: False +Last exit code: 0 + +5.1> +``` + +## Way 2 - Break + +### Break with an UnknownLabel terminates the script directly + +```powershell +5.1> Get-Content .\test.ps1 +function foo { + Write-Output beginFunction + 1..3 | % { + Write-Output $_ + break foobar + } + Write-Output endFunction +} + +Write-Output beginScript +foo +Write-Output endScript + +5.1> .\test.ps1 +beginScript +beginFunction +1 + +5.1> Write-Host "Last execution status: $?" ; Write-Host "Last exit code: $LASTEXITCODE" +Last execution status: True +Last exit code: 0 + +5.1> +``` + +### But it terminates also the caller script + +```powershell +5.1> Get-Content .\test.ps1 +function foo { + Write-Output beginFunction + 1..3 | % { + Write-Output $_ + break foobar + } + Write-Output endFunction +} + +Write-Output beginScript +foo +Write-Output endScript + +5.1> Get-Content .\call-test.ps1 +Write-Output 'Call test.ps1' +./test.ps1 +Write-Output 'End call test.ps' +Write-Output "call-test.ps1: Last execution status: $?" +Write-Output "call-test.ps1: Last exit code: $LASTEXITCODE" + +5.1> .\call-test.ps1 +Call test.ps1 +beginScript +beginFunction +1 + +5.1> +``` + +!!! warning + + Never use `break UnknownLabel` to terminate the script. Break does't raise error, the caller script cannot catch its output. + +## Terminate the current Powershell session + +## Way 1 - System.Environment.Exit + + + +### Environment.Exit with code 0 and started by powershell.exe + +```powershell +5.1> Get-Content .\test.ps1 +function foo { + Write-Output beginFunction + 1..3 | % { + Write-Output $_ + [Environment]::Exit(0) + } + Write-Output endFunction +} + +Write-Output beginScript +foo +Write-Output endScript + +5.1> powershell -noprofile .\test.ps1 +beginScript +beginFunction +1 + +5.1> Write-Host "Last execution status: $?" 
; Write-Host "Last exit code: $LASTEXITCODE" +Last execution status: True +Last exit code: 0 + +5.1> +``` + +### Environment.Exit with code 1 and started by powershell.exe + +```powershell +5.1> Get-Content .\test.ps1 +function foo { + Write-Output beginFunction + 1..3 | % { + Write-Output $_ + [Environment]::Exit(1) + } + Write-Output endFunction +} + +Write-Output beginScript +foo +Write-Output endScript + +5.1> powershell -noprofile .\test.ps1 +beginScript +beginFunction +1 + +5.1> Write-Host "Last execution status: $?" ; Write-Host "Last exit code: $LASTEXITCODE" +Last execution status: False +Last exit code: 0 + +5.1> +``` + +### Environment.Exit with code 0 and started by Start-Process + +```powershell +5.1> Get-Content .\test.ps1 +function foo { + Write-Output beginFunction + 1..3 | % { + Write-Output $_ + [Environment]::Exit(0) + } + Write-Output endFunction +} + +Write-Output beginScript +foo +Write-Output endScript + +5.1> Start-Process .\test.ps1 + + +5.1> Write-Host "Last execution status: $?" ; Write-Host "Last exit code: $LASTEXITCODE" +Last execution status: True +Last exit code: 0 + +5.1> +``` + +### Environment.Exit with code 1 and started by Start-Process + +```powershell +5.1> Get-Content .\test.ps1 +function foo { + Write-Output beginFunction + 1..3 | % { + Write-Output $_ + [Environment]::Exit(1) + } + Write-Output endFunction +} + +Write-Output beginScript +foo +Write-Output endScript + +5.1> Start-Process .\test.ps1 + + +5.1> Write-Host "Last execution status: $?" ; Write-Host "Last exit code: $LASTEXITCODE" +Last execution status: True +Last exit code: 0 + +5.1> +``` + +## Way 2 - Stop-Process + +Powershell has an [automatic variable called $PID](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_automatic_variables?view=powershell-6#pid) which refers to the process ID that is hosting the current PowerShell session. 
+ +### Stop-Process started by powershell.exe + +```powershell +5.1> Get-Content .\test.ps1 +function foo { + Write-Output beginFunction + 1..3 | % { + Write-Output $_ + Write-Output "Kill process $PID" + Stop-Process $PID + } + Write-Output endFunction +} + +Write-Output beginScript +foo +Write-Output endScript + +5.1> powershell -NoProfile .\test.ps1 +beginScript +beginFunction +1 +Kill process 12348 + +5.1> Write-Host "Last execution status: $?" ; Write-Host "Last exit code: $LASTEXITCODE" +Last execution status: False +Last exit code: 0 + +5.1> +``` + +### Stop-Process started by Start-Process + +```powershell +5.1> Get-Content .\test.ps1 +function foo { + Write-Output beginFunction + 1..3 | % { + Write-Output $_ + Write-Output "Kill process $PID" + Stop-Process $PID + } + Write-Output endFunction +} + +Write-Output beginScript +foo +Write-Output endScript + +5.1> Start-Process .\test.ps1 + + +5.1> Write-Host "Last execution status: $?" ; Write-Host "Last exit code: $LASTEXITCODE" +Last execution status: True +Last exit code: 0 + +5.1> +``` + +## Conclusion + +| Goal | Terminate Method | Last execution status: $? 
| Last exit code: $LASTEXITCODE | Comment | +|-------------------|--------------------------------------------------|---------------------------|-------------------------------|------------------| +| Terminate Script | exit | True | 0 | | +| Terminate Script | exit 0 | True | 0 | | +| Terminate Script | exit 1 | False | 0 | | +| Terminate Script | break UnknownLabel | True | 0 | **Never use it** | +| Terminate Process | [Environment]::Exit(0) started by powershell.exe | True | 0 | | +| Terminate Process | [Environment]::Exit(1) started by powershell.exe | False | 0 | | +| Terminate Process | [Environment]::Exit(0) started by Start-Process | True | 0 | | +| Terminate Process | [Environment]::Exit(1) started by Start-Process | True | 0 | | +| Terminate Process | Stop-Process started by powershell.exe | False | 0 | | +| Terminate Process | Stop-Process started by Start-Process | True | 0 | | diff --git a/docs/posts/2018/2018-10-03-update-gitlab-in-docker.md b/docs/posts/2018/2018-10-03-update-gitlab-in-docker.md new file mode 100644 index 00000000..f683c749 --- /dev/null +++ b/docs/posts/2018/2018-10-03-update-gitlab-in-docker.md @@ -0,0 +1,97 @@ +--- +authors: +- copdips +categories: +- gitlab +- cicd +- docker +- ubuntu +comments: true +date: + created: 2018-10-03 +description: Step by step procedure to update Gitlab in docker. +--- + +# Update Gitlab in docker + +Gitlab has several methods to update to newer version depending on the type of the original installation and the Gitlab version. This post will show you the way for docker version of Gitlab, which is the simplest among others. + + + +## Some docs on the Internet + +This post will follow the [official doc for updating docker version of Gitlab](https://docs.gitlab.com/omnibus/docker/README.html#upgrade-gitlab-to-newer-version). 
+ +If you installed the [Gitlab with docker compose](https://docs.gitlab.com/omnibus/docker/README.html#install-gitlab-using-docker-compose), please follow [this official procedure](https://docs.gitlab.com/omnibus/docker/README.html#update-gitlab-using-docker-compose). + +And hereunder some docs for the non docker version update if you are interested: +1. [Official global Gitlab update doc](https://docs.gitlab.com/ee/update/) +2. [Official doc for upgrading without downtime](https://docs.gitlab.com/ee/update/#upgrading-without-downtime) +3. [Official doc for updating Gitlab installed from source](https://gitlab.com/gitlab-org/gitlab-ce/tree/master/doc/update) +4. [Official doc for patching between minor feature versions](https://docs.gitlab.com/ee/update/patch_versions.html) +5. [Official doc for restoring from backup after a failed upgrade](https://docs.gitlab.com/ee/update/restore_after_failure.html) + +## Backup before anything + +We must backup the Gitlab before everything. I've already written [a post](https://copdips.com/2018/09/backup-and-restore-gitlab-in-docker.html#backup-gitlab-in-docker) on how to backup up Gitlab docker version. + +## Verify the docker container volumes + +The update procedure will remove the current Gitlab container, so the data must be kept somewhere to be reused by the update. As I wrote in [a previous post](https://copdips.com/2018/09/install-gitlab-ce-in-docker-on-ubuntu.html#install-gitlab-ce-in-docker) on how to install Gitlab in docker, we used the `docker run --volume` to mount the docker host volumes to Gitlab container. So even the Gitlab container is removed, the data are still kept in the docker host. 
+ +To verify the mounted volumes: + +```bash +xiang@ubuntu1804:~$ docker ps +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +707439b39dd1 gitlab/gitlab-ce:10.8.3-ce.0 "/assets/wrapper" 3 weeks ago Up 2 hours (healthy) 0.0.0.0:80->80/tcp, 0.0.0.0:443->443/tcp, 0.0.0.0:2222->22/tcp gitlab +xiang@ubuntu1804:~$ +xiang@ubuntu1804:~$ docker container inspect -f "{{ json .HostConfig.Binds }}" gitlab | python3 -m json.tool +[ + "/srv/gitlab/config:/etc/gitlab", + "/srv/gitlab/logs:/var/log/gitlab", + "/srv/gitlab/data:/var/opt/gitlab" +] +``` + +Ok, I see there're three volumes mounted in the Gitlab container, it's good. + +## Update docker version of Gitlab + +Exactly the same procedure as [the official one](https://docs.gitlab.com/omnibus/docker/#upgrade-gitlab-to-newer-version). I will update the current gitlab-ce:10.8.3-ce.0 to gitlab-ce:latest + +1. Pull the new image: + + To pull other version, change the `lastest` by the tag name which can be found from the [docker hub](https://hub.docker.com/r/gitlab/gitlab-ce/tags/). + + ```bash + docker pull gitlab/gitlab-ce:latest + + ``` + +2. Stop the running container called gitlab: + ```bash + docker stop gitlab + ``` + +3. Remove existing container: + ```bash + docker rm gitlab + ``` + +4. Create the container once again with [previously specified options](https://copdips.com/2018/09/install-gitlab-ce-in-docker-on-ubuntu.html#install-gitlab-ce-in-dockers): + ```bash + docker run --detach \ + --hostname gitlab.copdips.local \ + --publish 443:443 --publish 80:80 --publish 2222:22 \ + --name gitlab \ + --restart always \ + --volume /srv/gitlab/config:/etc/gitlab \ + --volume /srv/gitlab/logs:/var/log/gitlab \ + --volume /srv/gitlab/data:/var/opt/gitlab \ + gitlab/gitlab-ce:latest + ``` + +That's all, go to take a coffee, GitLab will reconfigure and update itself, the procedure is pretty simple. 
+ +If you take a look at [the procedure for Gitlab installed from the source](https://gitlab.com/gitlab-org/gitlab-ce/tree/master/doc/update), you will thank yourself for choosing to install Gitlab in docker, because you chose the zen. diff --git a/docs/posts/2018/2018-10-10-migrate-gitlab-in-docker.md b/docs/posts/2018/2018-10-10-migrate-gitlab-in-docker.md new file mode 100644 index 00000000..c8496e0b --- /dev/null +++ b/docs/posts/2018/2018-10-10-migrate-gitlab-in-docker.md @@ -0,0 +1,174 @@ +--- +authors: +- copdips +categories: +- gitlab +- cicd +- docker +- migration +- ubuntu +comments: true +date: + created: 2018-10-10 +description: Step by step procedure to update Gitlab in docker. +--- + +# Migrate Gitlab in docker + +This post will walk you through the steps to migrate Gitlab from one docker container to another. The steps need you to know how to install a new Gitlab container and how to backup and restore Gitlab container, because the migration is just a restoration of a backup to another container. + + + +## Some docs on the Internet + +1. [Migrate GitLab Instance to new Host](https://pikedom.com/migrate-gitlab-instance-to-new-host/) + +## Backup before anything + +We must backup the Gitlab before everything. I've already written [a post](https://copdips.com/2018/09/backup-and-restore-gitlab-in-docker.html#backup-gitlab-in-docker) on how to backup up Gitlab docker version. For a double insurance, you can also at first create a VM snapshot/checkpoint, but don't forget to delete it as soon as the migration is successfully finished. + +In this post, we'll also use this backup to migrate the date to the new Gitlab container. The backup file name is `1538845523_2018_10_06_11.3.1_gitlab_backup.tar`. + +## Backup host key (optional) + +If rebuilding the machine and keeping the same IP, to avoid having to delete the host key entry in the ~/.ssh/know_hosts file, run the following to backup the SSH host keys. 
+ +```bash +# From gitlab docker container + +root@gitlab:/# tar -cvf $(date "+hostkeys-%s.tar") $(grep HostKey /etc/ssh/sshd_config | grep -v ^'#' | awk '{print $2}') +``` + +## Install a new Gitlab in docker with the same version + +I've already written [a post](https://copdips.com/2018/09/install-gitlab-ce-in-docker-on-ubuntu.html#install-gitlab-ce-in-docker) on how to install Gitlab-CE in docker. Be aware that for this container installation inside the same Ubuntu VM, we should map some new volumes and provide a new container name. If you install Gitlab container in another VM, of course you can reuse the same volume name and container name. + +## Verify the new Gitlab SSL certificate before the migration + +Depends on your client OS (Linux or Windows), you can use [the commands here](https://copdips.com/2018/09/setup-https-for-gitlab.html#check-the-website-ssl-certificate-from-the-command-line) to verify the SSL certificate. The mine is a Windows 10. Note that the certificate's serial number is `8c:87:45:ab:b9:04:b0:ae`. + +```powershell +6.1.0> $null | openssl s_client -connect gitlab.copdips.local:443 | openssl x509 -text -noout +depth=0 O = copdips, CN = gitlab.copdips.local +verify error:num=18:self signed certificate +verify return:1 +depth=0 O = copdips, CN = gitlab.copdips.local +verify return:1 +DONE +Certificate: + Data: + Version: 3 (0x2) + Serial Number: + 8c:87:45:ab:b9:04:b0:ae + Signature Algorithm: sha256WithRSAEncryption + Issuer: O=copdips, CN=gitlab.copdips.local + Validity + Not Before: Oct 2 21:00:13 2018 GMT + Not After : Jun 28 21:00:13 2021 GMT + Subject: O=copdips, CN=gitlab.copdips.local +(...) +``` + +## Transfer the backup + +Copy the backup file `1538845523_2018_10_06_11.3.1_gitlab_backup.tar` to the new Gitlab. From the backup name, we know the old gitlab version is at v11.3.1, this version must be exaclty the same as the new Gitlab. 
+ +To verify current Gitlab docker version: + +```bash +# From gitlab docker container + +root@gitlab:/# gitlab-rake gitlab:env:info | grep "GitLab information" -A2 +GitLab information +Version: 11.3.1 +Revision: 32cb452 +``` + +Transfer the backup file: + +```bash +# From Ubuntu host outside of the Gitlab docker container + +xiang@ubuntu1804:~$ sudo cp \ + /srv/gitlab1083/data/backups/1538845523_2018_10_06_11.3.1_gitlab_backup.tar \ + /srv/gitlab-new/data/backups/ +``` + +## Check the backup permission + +The backup file must be owned by `git` account. The previous copy make the file's owner as `root:root`, so we need to change it. + +```bash +# From gitlab docker container + +root@gitlab:/# ls -lart /var/opt/gitlab/backups +total 344 +-rw------- 1 git git 81920 Oct 2 21:33 1538516038_2018_10_02_10.8.3_gitlab_backup.tar +drwx------ 8 git git 4096 Oct 2 21:40 tmp +-rw------- 1 root root 256000 Oct 8 21:00 1538845523_2018_10_06_11.3.1_gitlab_backup.tar +drwx------ 3 git root 4096 Oct 8 21:00 . +drwxr-xr-x 20 root root 4096 Oct 8 21:06 .. +root@gitlab:/# chown -v git:git /var/opt/gitlab/backups/1538845523_2018_10_06_11.3.1_gitlab_backup.tar +changed ownership of '/var/opt/gitlab/backups/1538845523_2018_10_06_11.3.1_gitlab_backup.tar' from root:root to git:git +``` + +## Migrate by restoring from the backup + +For docker version of Gitlab, the migration is just a [standard restoration procedure](https://copdips.com/2018/09/backup-and-restore-gitlab-in-docker.html#restore-gitlab). 
+ +## Stop unicorn and sidekiq +```bash +# From gitlab docker container + +root@gitlab:/# gitlab-ctl reconfigure +gitlab-ctl start +gitlab-ctl stop unicorn +gitlab-ctl stop sidekiq +gitlab-ctl status +ls -lart /var/opt/gitlab/backups +``` + +## Start restore + +```bash +# From Ubuntu host outside of the Gitlab docker container + +xiang@ubuntu1804:~$ docker exec -it gitlab gitlab-rake gitlab:backup:restore BACKUP=1538845523_2018_10_06_11.3.1 --trace +``` + +## Start Gitlab + +```bash +# From Gitlab docker container + +root@gitlab:/# gitlab-ctl restart +root@gitlab:/# gitlab-rake gitlab:check SANITIZE=true +``` + +## Verify + +## Verify the config file gitlab.rb + +The config file is not replaced by the backup. If you want to use the config from the old container, just copy the file, and restart Gitlab by `gitlab-ctl reconfigure` from the docker container or `docker restart [container name]` from the docker host. To locate the config file, you can refer to [this post](https://copdips.com/2018/09/setup-https-for-gitlab.html#configure-https-on-gitlab). + +## Verify SSL certificate + +By [rechecking the SSL certificate](https://copdips.com/2018/10/migrate-gitlab-in-docker.html#verify-the-new-gitlab-ssl-certificate-before-the-migration), the SSL certificate is not replaced. If you want to keep the old certificate especially if your certificate is self-signed, you need to copy it from the old container's volume. You can check [this post](https://copdips.com/2018/09/setup-https-for-gitlab.html#save-the-ssl-certificate) to locate the SSL certificate. + +## Verify local user accounts + +The local user accounts are replaced by the backup. Good. + +## Verify repositories + +The repositories are replaced by the backup. Good. + +## Verify Gitlab runner + +The Gitlab runner are replaced by the backup. Good. + +!!! 
warning + + But if the Gitlab SSL certificate is **self-signed**, and you dont want to restore the old one from the old container, you need to import the new self-signed SSL certificate to all Gitlab runners's cert store, at least for Windows runners, Linux runners are not tested because I'm still a novice on Linux. + +Please take a look at the line starting by `Import-Certificate` from [this post](https://copdips.com/2018/09/install-gitlab-runner-on-windows-by-powershell-psremoting.html#register-gitlab-runner-on-windows) to learn how to import the certificate to the `Trusted Root Certification Authorities` logical store in the [Windows certificate store](https://docs.microsoft.com/en-us/windows/desktop/seccertenroll/about-certificate-directory). diff --git a/docs/posts/2018/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows.md b/docs/posts/2018/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows.md new file mode 100644 index 00000000..bcb7c8f0 --- /dev/null +++ b/docs/posts/2018/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows.md @@ -0,0 +1,253 @@ +--- +authors: +- copdips +categories: +- gitlab +- cicd +- python +- powershell +comments: true +date: + created: 2018-10-18 +description: Gitlab ships with its own free CICD which works pretty well. This post + will give a .gitlab-ci.yml demo for a Python project running on Gitlab Windows runner. +--- + +# Using Gitlab integrated CICD for Python project on Windows + +Gitlab ships with its own free CICD which works pretty well. This post will give you an example of the CICD file `.gitlab-ci.yml` for a Python project running on [Gitlab Windows runner](https://copdips.com/2018/09/install-gitlab-runner-on-windows-by-powershell-psremoting.html). + + + +## Some docs on the Internet + +1. [Official GitLab Continuous Integration (GitLab CI/CD)](https://docs.gitlab.com/ee/ci/README.html) +2. 
[Official Configuration of your jobs with .gitlab-ci.yml](https://docs.gitlab.com/ee/ci/yaml/README.html) +3. [Official Gitlab Pipelines settings](https://docs.gitlab.com/ee/user/project/pipelines/settings.html) +4. [Official Publish code coverage report with GitLab Pages](https://about.gitlab.com/2016/11/03/publish-code-coverage-report-with-gitlab-pages/) +5. [introduction-gitlab-ci](https://blog.eleven-labs.com/fr/introduction-gitlab-ci/) +6. [Rubular: a Ruby regular expression editor and tester](http://rubular.com/) + +## Code Coverage + +The official doc on how to use coverage is not very clear. + +My coverage tool's output (from `pytest --cov=`) is something like : + +```shell +----------- coverage: platform win32, python 3.7.0-final-0 ----------- +Name Stmts Miss Cover +------------------------------------------------------------- +python_project\__init__.py 6 0 100% +python_project\ctx_fetcher.py 15 0 100% +python_project\extras\__init__.py 0 0 100% +python_project\extras\celery.py 18 18 0% +python_project\filters.py 6 2 67% +python_project\parser.py 26 0 100% +python_project\request_id.py 42 1 98% +------------------------------------------------------------- +TOTAL 113 21 81% +``` + +In my example [.gitlab-ci.yml](https://copdips.com/2018/10/using-gitlab-integrated-cicd-for-python-project-on-windows.html#gitlab-ci-yml-file-content), the coverage is configured as: + +```yml +coverage: '/^TOTAL.*\s+(\d+\%)$/' +``` + +This regex will find the coverage which is at `81%`. + +**Be aware that**: + +1. The coverage only use regular expression to find the coverage percentage from coverage tool's output. +2. The regular expression must be surrounded by single quote `'`, double quote is not allowed. +3. Inside the single quotes, must be surrounded by `/`. +4. You can use to test your regex. +5. The overage regex returns the last catch group value from the output. Even if it is not in the last line, or if the regex catches more than one values among all the lines. 
+ +## .gitlab-ci.yml example for Python project on a Windows runner + +## .gitlab-ci.yml file content + +I cloned the project [flask_log_request_id](https://github.com/Workable/flask-log-request-id) and try to run CICD over it. + +!!! note + + I'm still working on this CICD `.gitlab-ci.yml` file, the example given here will be updated as long as I add new things inside. + +```yml +stages: + - venv + - test + - build + - deploy + +before_script: + - $gitApiUrl = 'https://gitlab.copdips.local/api/v4' + # will save git api token more securely later. + - $gitApiToken = $env:GitApiToken + - $gitApiHeader = @{"PRIVATE-TOKEN" = $gitApiToken} + - $cicdReportsFolderPath = Join-Path (Get-Location) "cicd_reports" + - $venvPath = "$env:temp/venv/$($env:CI_PROJECT_NAME)" + - > + function Set-SecurityProtocolType { + # [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 + # $AllProtocols = [System.Net.SecurityProtocolType]'Ssl3,Tls,Tls11,Tls12' + $AllProtocols = [System.Net.SecurityProtocolType]'Tls12' + [System.Net.ServicePointManager]::SecurityProtocol = $AllProtocols + } + - > + function Write-PythonPath { + $pythonPath = $(Get-Command python | % source) + Write-Output "The python path is at: '$pythonPath'" + } + - > + function Get-UpstreamProject { + $apiParam = @{ + Headers = $gitApiHeader + Uri = "$gitApiUrl/projects?search=$($env:CI_PROJECT_NAME)" + } + if ($PSVersionTable.PSVersion.Major -gt 5) { + $apiParam.SkipCertificateCheck = $true + } + $projectList = Invoke-RestMethod @apiParam + $upstreamProject = $projectList | ? 
forked_from_project -eq $null + return $upstreamProject + } + - > + function Get-UpstreamProjectId { + $upstreamProject = Get-UpstreamProject + return $upstreamProject.id + } + + - > + function Test-CreateVenv { + param($VenvPath, $GitCommitSHA) + $gitShowCommand = "git show $GitCommitSHA --name-only" + $gitShowResult = Invoke-Expression $gitShowCommand + Write-Host "$gitShowCommand`n" + $gitShowResult | ForEach-Object {Write-Host $_} + $changedFiles = Invoke-Expression "git diff-tree --no-commit-id --name-only -r $GitCommitSHA" + $requirementsFiles = @() + $requirementsFiles += "requirements.txt" + foreach ($requirements in $requirementsFiles) { + if ($requirements -in $changedFiles) { + Write-Host "`nFound $requirements in the changed files, need to create venv." + return $True + } + } + if (-not (Test-Path $VenvPath)) { + Write-Host "`nCannot found venv at $VenvPath, need to create venv." + return $True + } + + Write-Host "`nNo need to create venv." + return $False + } + - > + function Enable-Venv { + param($VenvPath) + + Invoke-Expression (Join-Path $VenvPath "Scripts/activate.ps1") + Write-Host "venv enabled at: $VenvPath" + Write-PythonPath + } + - > + function Create-Venv { + param($VenvPath) + + Write-Output "Creating venv at $venvPath ." + python -m venv $VenvPath + Write-Output "venv created at $venvPath ." 
+ } + - > + function Install-PythonRequirements { + param($VenvPath) + + Enable-Venv $VenvPath + python -m pip install -U pip setuptools wheel + pip install -r requirements.txt + } + - > + function Remove-Venv { + param($VenvPath) + + if (Test-Path $VenvPath) { + Remove-Item $VenvPath -Recurse -Force + Write-Host "venv removed from: $VenvPath" + } else { + Write-Host "venv not found at: $VenvPath" + } + } + - Get-Location + - git --version + - python --version + - Write-PythonPath + - $PSVersionTable | ft -a + - Get-ChildItem env:\ | Select-Object Name, Value | ft -a + +venv: + stage: venv + script: + - > + if (Test-CreateVenv $venvPath $env:CI_COMMIT_SHA) { + Remove-Venv $venvPath + Create-Venv $venvPath + } + Install-PythonRequirements $venvPath + +pytest: + stage: test + script: + - $reportFolder = Join-Path $cicdReportsFolderPath "pytest" + - New-Item -Path $reportFolder -Type Directory -Force + - $upstreamProjectId = Get-UpstreamProjectId + - Write-Output "upstreamProjectId = $upstreamProjectId" + # TODO: add check master last commit coverage + - Enable-Venv $venvPath + - pytest --cov=flask_log_request_id --cov-report=html:$reportFolder + - $coverageLine = (Get-Content (Join-Path $reportFolder index.html) | Select-String "pc_cov").line + - $coverageString = ($coverageLine -replace "<[^>]*>", "").trim() + - Write-Output "Total Coverage = $coverageString" + coverage: '/^(?i)(TOTAL).*\s+(\d+\%)$/' + + +nosetests: + stage: test + script: + - Enable-Venv $venvPath + - nosetests.exe + coverage: '/^TOTAL.*\s+(\d+\%)$/' + +flake8: + stage: test + script: + - Enable-Venv $venvPath + - flake8.exe .\flask_log_request_id + +mypy: + stage: test + script: + - Enable-Venv $venvPath + - $reportFolder = Join-Path $cicdReportsFolderPath "mypy" + - New-Item -Path $reportFolder -Type Directory -Force + - $mypyResult = mypy ./flask_log_request_id --ignore-missing-imports --html-report $reportFolder --xml-report $reportFolder + - Write-Output "MyPy result = `"" + - $mypyResult | % 
{ Write-Output $_} + - Write-Output "`"`nEnd of MyPy result." + - if ($mypyResult.count -gt 2) { + return $False + } +``` + +## .gitlab-ci.yml results from pipeline view + +![.gitlab-ci.yml results from pipeline view](../../assets/blog_images/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_pipeline_view.PNG) + +## .gitlab-ci.yml results from job view + +![.gitlab-ci.yml results from job view](../../assets/blog_images/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_job_view.PNG) + +## .gitlab-ci.yml results from merge_request view + +![.gitlab-ci.yml results from merge_request view](../../assets/blog_images/2018-10-18-using-gitlab-integrated-cicd-for-python-project-on-windows/gitlab-ci.yml_result_from_merge_request_view.PNG) diff --git a/docs/posts/2018/2018-11-01-setting-pwsh-invoke-webrequest-proxy.md b/docs/posts/2018/2018-11-01-setting-pwsh-invoke-webrequest-proxy.md new file mode 100644 index 00000000..b185f135 --- /dev/null +++ b/docs/posts/2018/2018-11-01-setting-pwsh-invoke-webrequest-proxy.md @@ -0,0 +1,38 @@ +--- +authors: +- copdips +categories: +- powershell +- proxy +comments: true +date: + created: 2018-11-01 +description: One-line command to set Powershell Core web cmdlets proxy. +--- + +# Setting Pwsh Invoke-WebRequest Proxy + +Different than Windows Powershell, Powershell Core doesn't [use the system proxy setting on Windows](https://copdips.com/2018/05/setting-up-powershell-gallery-and-nuget-gallery-for-powershell.html#configure-proxy-in-powershell). This post will show you an one-line command to set Powershell Core web cmdlets proxy. + + + +My office working environment is behind an Internet proxy, and I use [Scoop](https://github.com/lukesampson/scoop) to install many dev tools on my Windows desktop. 
+ +Scoop is a [Chocolatey-like](https://github.com/lukesampson/scoop/wiki/Chocolatey-Comparison) Windows package management tool but its package sources are all on the Internet, there's no possibility to mirror the packages to a local repository. So I need to use the company Internet proxy to use the Scoop. + +!!! note + + In fact, there's one possibility to install packages by [using the local source control repo](https://github.com/lukesampson/scoop/wiki/Buckets#creating-your-own-bucket), I've never tested, it should be technically worked, and seems not very difficult to set up, but it needs to be maintained. + +Scoop uses mainly the `Invoke-WebRequest` cmdlet to download the package sources from the Internet, and it has already generously given a [wiki on how to configure proxy](https://github.com/lukesampson/scoop/wiki/Using-Scoop-behind-a-proxy), but I've switched to Powershell Core (pwsh.exe) since a while, and none of the methods given by the wiki works. + +After some googling, I finally find the [issue 3122](https://github.com/PowerShell/PowerShell/issues/3112) from the official Powershell Github repository, the collaborator [@markekraus](https://github.com/markekraus) gave a solution: + +```powershell +$PSDefaultParameterValues["invoke-webrequest:proxy"] = 'http://username:password@proxyserver:port' +``` + +!!! warning + + When giving the password as a plain text in a string, always use the single quotes to create the string, as some special characters ($, `, etc.) in the password might be evaluated by the string created by the double quotes. Otherwise pass the password as a variable into a double quoted string to convert it to a plain text. On Linux bash, we can see the same thing. 
+ diff --git a/docs/posts/2018/2018-11-05-creating-multiple-redis-instance-services-on-windows.md b/docs/posts/2018/2018-11-05-creating-multiple-redis-instance-services-on-windows.md new file mode 100644 index 00000000..321341ea --- /dev/null +++ b/docs/posts/2018/2018-11-05-creating-multiple-redis-instance-services-on-windows.md @@ -0,0 +1,79 @@ +--- +authors: +- copdips +categories: +- powershell +- redis +comments: true +date: + created: 2018-11-05 +description: Creating multiple Redis instance as Windows service on the same Windows + server. +--- + +# Creating Multiple Redis Instance Services On Windows + +Even Salvatore Sanfilippo (creator of Redis) thinks it's [a bad idea](https://stackoverflow.com/a/36498590) to use multiple DBs in Redis. So we can install as many Redis instances as the number of DBs we need. This post will show you how to create multiple Redis instance as Windows service on the same Windows server. + + + +## Choose Redis Windows port version + +As mentioned by [the official doc](https://redislabs.com/ebook/appendix-a/a-3-installing-on-windows/a-3-1-drawbacks-of-redis-on-windows/), due to the lack of fork on Windows system, Redis is not officially supported on Windows. For Windows port version of Redis, we can use the one from : https://github.com/MicrosoftArchive/redis , currently the latest version is [v3.2.100](https://github.com/MicrosoftArchive/redis/releases/tag/win-3.2.100) which was released on Jul 1, 2016. + +## Create single Redis service on Windows + +[The official doc](https://redislabs.com/blog/redis-on-windows-8-1-and-previous-versions/) is good enough to get the job done. 
You can create the service by a simple command: + +```powershell +> redis-server --service-install +``` + +Or if you want to use a customized configuration: + +```powershell +> redis-server --service-install redis.windows.conf --loglevel verbose +``` + +BTW, if you want to use Redis in the Windows Subsystem for Linux (WSL) on Windows 10 or on Windows Server 2019, you can refer to [this official doc](https://redislabs.com/blog/redis-on-windows-10/). + +## Create multiple Redis services on Windows + +There's no many docs on the Internet telling you how to achieve that, in fact [the doc from the Github](https://github.com/MicrosoftArchive/redis/blob/3.0/Windows%20Service%20Documentation.md#naming-the-service) gives the answer. We should use the magic `--service-name`. + +```powershell +# Create redis service which name is redis_6381 and listens to the port tcp 6381 +> redis-server --service-install --service-name redis_6381 --port 6381 + +# Create redis service which name is redis_6382 and listens to the port tcp 6382 +> redis-server --service-install --service-name redis_6382 --port 6382 +``` + +We just created 2 Redis server services on Windows, the only difference between them is the ports they listen to. All the other configurations are the default ones. This provokes a problem. That is the [rdb dump file](https://redis.io/topics/persistence). The default configure set the rdb file name to dump.rdb, so both the redis services are using the same dump.rdb file which creates the file conflict in case of [SAVE command](https://redis.io/commands/save) or [BGSAVE command](https://redis.io/commands/bgsave). + +Due to above problem, we need to set each redis service uses its own rdb file. +In redis config, there're two configurations to control the rdb file. + +1. rbd file folder + + ```shell + # from redis-cli + config get dir + config set dir [new dir path] + ``` + +2. 
rdb file name + + ```shell + # from redis-cli + config get dbfilename + config set dbfilename [new db file name] + ``` + +Don't forget to set also the `maxmemory` and [`maxmemory-policy`](https://redis.io/topics/lru-cache) in order to avoid the out of memory issue. Redis' default `maxmemory` is set to `0` which means no limitation on used memory, and the default `maxmemory-policy` is set to `noeviction`, which means the Redis server returns errors when the memory limit was reached and the client is trying to execute commands that could result in more memory to be used. + +To get Redis memory usage, use : +```shell +# from redis-cli +info memory +``` diff --git a/docs/posts/2019/2019-04-01-creating-custom-python-request-auth-class.md b/docs/posts/2019/2019-04-01-creating-custom-python-request-auth-class.md new file mode 100644 index 00000000..c4b96a8b --- /dev/null +++ b/docs/posts/2019/2019-04-01-creating-custom-python-request-auth-class.md @@ -0,0 +1,134 @@ +--- +authors: +- copdips +categories: +- python +comments: true +date: + created: 2019-04-01 +description: Creating custom python request auth class with requests.auth.AuthBase. +--- + +# Creating Custom Python Request Auth Class + +When you need to use a complicated, or a non-standard API authentication method, +or your dev and prd environments don't use the same API authentication method, +it might be better to create a Python requests auth method to reduce your work. + + + +## Create the class MyAuth + +Suppose you have an API url at: https://httpbin.org/, +its authentication method is by request's headers where `headers["Authorization"] = username_password`. 
+ +So the class MyAuth could be as following: + +**The most important is the `__call__()` method.** + +```python +import requests + + +def auth_header_value(username, password): + return "{}_{}".format(username, password) + + +class MyAuth(requests.auth.AuthBase): + # http://docs.python-requests.org/en/master/user/authentication/#new-forms-of-authentication + def __init__(self, username, password): + self.username = username + self.password = password + + def __call__(self, r: requests.Request): + # Implement my authentication + # http://docs.python-requests.org/en/master/_modules/requests/auth/ + r.headers["Authorization"] = auth_header_value(self.username, self.password) + return r + +# unittest +def test_myauth(): + username = "u1" + password = "p1" + auth = MyAuth(username, password) + url = "https://httpbin.org/" + # http://docs.python-requests.org/en/master/user/advanced/ + prepared_request = requests.Request("GET", url) + prepared_request_with_auth = auth.__call__(prepared_request) + assert prepared_request_with_auth.headers["Authorization"] == "{}_{}".format( + username, password + ) +``` + +## Use the class MyAuth + +## Without the class MyAuth + +We should directly provide the headers object in the request: + +```python +import requests + +username = "u1" +password = "p1" +url = "https://httpbin.org/" +headers = {"Authorization": "{}_{}".format(username, password)} +requests.get(url, headers=headers) +``` + +## With the class MyAuth + +We just need pass it to the param `auth`: + +```python +import requests +import MyAuth + +username = "u1" +password = "p1" +url = "https://httpbin.org/" +auth = MyAuth(username, password) +requests.get(url, auth=auth) +``` + +## Conditional MyAuth + +You may not find the power of the MyAuth from te above examples. True. +But suppose if your **dev API uses HTTPBasicAuth**, +and your **prd API uses a special key ("token") in the request's headers**. +And suppose you have many APIs to target in this manner. 
+What would you do without the class MyAuth ? Adding `if..else..` block everywhere ? + +With the class MyAuth, we just need to add only once `if..else..` block in the `__call__()` method. + +For example: + +```python +import requests + + +def auth_header_value(username, password): + return "{}_{}".format(username, password) + + +class MyAuth(requests.auth.AuthBase): + # http://docs.python-requests.org/en/master/user/authentication/#new-forms-of-authentication + def __init__(self, username, password, token, env): + # we must specify all the possible auth credentials here, + # and the variables (env) which allows to select the credential to use. + self.username = username + self.password = password + self.token = token + self.env = env + + def __call__(self, r: requests.Request): + # Implement my authentication + if env == "dev": + # http://docs.python-requests.org/en/master/_modules/requests/auth/ + r.headers['Authorization'] = requests.auth._basic_auth_str( + self.username, self.password + ) + elif env == "prd": + r.headers["token"] = self.token + return r +``` diff --git a/docs/posts/2019/2019-05-14-using-python-sqlalchemy-session-in-multithreading.md b/docs/posts/2019/2019-05-14-using-python-sqlalchemy-session-in-multithreading.md new file mode 100644 index 00000000..9625ca64 --- /dev/null +++ b/docs/posts/2019/2019-05-14-using-python-sqlalchemy-session-in-multithreading.md @@ -0,0 +1,154 @@ +--- +authors: +- copdips +categories: +- python +- multithreading +- sqlalchemy +comments: true +date: + created: 2019-05-14 + updated: 2021-03-21 +description: Using Python SQLAlchemy session in multithreading by using contextmanager + or scope_session. +--- + +# Using Python SQLAlchemy session in multithreading + +SQLAlchemy DB session is [not thread safe](https://docs.sqlalchemy.org/en/13/orm/session_basics.html#is-the-session-thread-safe). In this post, I will show you 2 ways to use it in a multithreading context. 
+ + + +## Way 1 - Using contextmanager to create a session per thread + +Below is an example given by the official doc to show how to use the [contextmanager](https://docs.sqlalchemy.org/en/13/orm/session_basics.html#when-do-i-construct-a-session-when-do-i-commit-it-and-when-do-i-close-it) to construct, commit and close a SQLAlchemy session. + +```python +### another way (but again *not the only way*) to do it ### + +from contextlib import contextmanager + + +@contextmanager +def session_scope(): + """Provide a transactional scope around a series of operations.""" + session = Session() + try: + yield session + session.commit() + except: + session.rollback() + raise + finally: + session.close() + + +def run_my_program(): + with session_scope() as session: + ThingOne().go(session) + ThingTwo().go(session) +``` + +Suppose we have a function called `f1` which does something with the session. And we need to call `f1` in a multithreading context. +All we need to do is to add the `session_scope()` around the `f1`: + +```python +from contextlib import contextmanager +from multiprocessing.dummy import Pool as ThreadPool + +# db_utils is a python file that creats the Session by using the factory sessionmaker(), +# not shown here. +from db_utils import Session + + +@contextmanager +def session_scope(): + """Provide a transactional scope around a series of operations.""" + session = Session() + try: + yield session + session.commit() + except: + session.rollback() + raise + finally: + session.close() + + +def f1(session, number): + # do something around the session and the number... + + +def thread_worker(number): + # We're using the session context here. + with session_scope() as session: + f1(session, number) + + +def work_parallel(numbers, thread_number=4): + pool = ThreadPool(thread_number) + results = pool.map(thread_worker, numbers) + # If you don't care about the results, just comment the following 3 lines. 
+ # pool.close() + # pool.join() + # return results + + +if __name__ == "__main__": + numbers = [1, 2, 3] + work_parallel(numbers, 8) +``` + +## Way 2 - Using scoped_session to create a thread-local variable + +[https://docs.sqlalchemy.org/en/13/orm/contextual.html#contextual-thread-local-sessions](https://docs.sqlalchemy.org/en/13/orm/contextual.html#contextual-thread-local-sessions) + +> The scoped_session object is a very popular and useful object used by many SQLAlchemy applications. However, it is important to note that it presents only one approach to the issue of Session management. If you’re new to SQLAlchemy, and especially if the term “thread-local variable” seems strange to you, we recommend that if possible you familiarize first with an off-the-shelf integration system such as Flask-SQLAlchemy or zope.sqlalchemy. + +```python +from multiprocessing.dummy import Pool as ThreadPool + +from sqlalchemy.orm import scoped_session +from sqlalchemy.orm import sessionmaker + + +def f1(number): + # now all calls to Session() will create a thread-local session. + # If we call upon the Session registry a second time, we get back the same Session. + session = Session() + # do something around the session and the number... + + # You can even directly use Session to perform DB actions. + # See: https://docs.sqlalchemy.org/en/13/orm/contextual.html#implicit-method-access + # when methods are called on the Session object, they are proxied to the underlying Session being maintained by the registry. + + +def thread_worker(number): + f1(number) + + +def work_parallel(numbers, thread_number=4): + pool = ThreadPool(thread_number) + results = pool.map(thread_worker, numbers) + # If you don't care about the results, just comment the following 3 lines. 
+ # pool.close() + # pool.join() + # return results + + +if __name__ == "__main__": + engine = create_engine("postgresql://scott:tiger@localhost/mydatabase") + session_factory = sessionmaker(bind=engine) + + # The Session object created here will be used by the function f1 directly. + Session = scoped_session(session_factory) + + numbers = [1, 2, 3] + work_parallel(numbers, 8) + + Session.remove() +``` + +## Bonus - How the Python web frameworks work with SQLAlchemy thread local scope + +[https://docs.sqlalchemy.org/en/13/orm/contextual.html#using-thread-local-scope-with-web-applications](https://docs.sqlalchemy.org/en/13/orm/contextual.html#using-thread-local-scope-with-web-applications) diff --git a/docs/posts/2019/2019-06-19-git-cheat-sheet.md b/docs/posts/2019/2019-06-19-git-cheat-sheet.md new file mode 100644 index 00000000..39e5772f --- /dev/null +++ b/docs/posts/2019/2019-06-19-git-cheat-sheet.md @@ -0,0 +1,282 @@ +--- +authors: +- copdips +categories: +- git +comments: true +date: + created: 2019-06-19 + updated: 2023-06-02 +description: Some personal often forgotten git commands. +--- + +# Git Cheat Sheet + +This is not a complete Git cheat sheet for everyone, this is just a personal cheat sheet for some often forgotten git commands. 
+ + + +> +> + +## Alias + +User level alias + +Edit `~/.gitconfig` + +```ini +git config --global alias.st status +git config --global alias.lga log --graph --decorate --oneline --all +git config --global alias.co checkout +git config --global alias.last log -1 HEAD +git config --global alias.ci commit +git config --global alias.unstage reset HEAD +git config --global alias.ll "log --graph --all --pretty=format:'%C(auto)%h%Creset %an: git config --global %s - %Creset %C(auto)%d%Creset %C(bold black)(%cr)%Creset %C(bold git config --global black)(%ci)%Creset' --no-abbrev-commit" +git config --global alias.sh show +git config --global alias.df diff +git config --global alias.br branch +git config --global alias.cm checkout main +git config --global alias.cd checkout dev +git config --global alias.rum pull --rebase upstream main +git config --global alias.rud pull --rebase upstream dev +git config --global alias.rom pull --rebase origin main +git config --global alias.rod pull --rebase origin dev +``` + +## ~/.bashrc + +```bash +alias gitpush='git ci -am $GIT_BRANCH ; git push origin $GIT_BRANCH' +alias gitamendpush='git add . ; git amend ; git push origin $GIT_BRANCH -f' +alias gitrebasemain='git cm ; git rom ; git fetch origin --prune ; git br -d $GIT_BRANCH' +alias gitrebasedev='git cd ; git rod ; git fetch origin --prune ; git br -d $GIT_BRANCH' +``` + +## Restore + +### Restore a file to an old version + +```bash +git restore --source [old_commit_hash] [file_name] +``` + +### Restore a deleted branch + +```bash +git reflog +git branch [branch_name] [commit_hash_that_preceded_the_delete_commit] +``` + +## Undo + +```mermaid! +flowchart LR + A(Working directory) -->|"git add"| B(Staging area) + B -->|"git commit"| C(Commit) + C -->|"git reset --soft HEAD~
(cannot reset single files)"| B + C -->|"git reset HEAD~"| A + B -->|"git restore --staged
git reset
git reset HEAD"| A + C -->|"git reset --hard"| D(/dev/null) + A -->|"git checkout"| D + D -->|"git reflog
git cherry-pick [commit]"| C +``` + +### Discard changes in working directory + +```bash +# discard changes to a file in working directory +git checkout + +# discard changes to all files in working directory +git checkout . +# or +git checkout * +``` + +!!! note + + Untracked files cannot be discarded by checkout. + +### Discard last commit (completely remove) + +```bash +# better to show git log history before using --hard for rollback purpose. +git reset --hard HEAD~ +``` + +!!! note + + We can recover the commit discarded by `--hard` with the `git cherry-pick [commit number]` if we displayed or saved it before. Whatever you can also use `git reflog` to get the commit number too. + +### Unstage from staging area + +[StackOverflow: How do I undo git add before commit?](https://stackoverflow.com/questions/348170/how-do-i-undo-git-add-before-commit) + +```bash +# unstage a file from staging area +git reset + +# unstage all files from staging area +git reset +``` + +!!! note + + No more need to add `HEAD` like `git reset HEAD ` and `git reset HEAD` since git v1.8.2. + +!!! warning + + Do not use `git rm --cached ` to unstage, it works only for newly created file to remove them from the staging area. But if you specify a existing file, it will delete it from cache, even if it is not staged. + +### Undo commit to working directory + +[StackOverflow: How do I undo the most recent local commits in Git?](https://stackoverflow.com/questions/927358/how-do-i-undo-the-most-recent-local-commits-in-git) + +You should readd the files if you want to commit them, as they're in the working directory now, they're unstaged too. + +```bash +# Undo last commit to working directory +git reset HEAD~ +# same as to +git reset HEAD~1 + +# Undo last 2 commits to working directory +git reset HEAD~2 + +# Undo till a special commit to working directory, +# the special commit and every commits before are still committed. +git reset + + + +``` + +!!! 
note + + `git reset HEAD` will do nothing, as the HEAD is already at the last commit. + +!!! note + + `git reset HEAD~1 ` will create a delete file index in staging area. Normally we don't need this command. + +### Undo commit to staging area + +[StackOverflow: How do I undo the most recent local commits in Git?](https://stackoverflow.com/questions/927358/how-do-i-undo-the-most-recent-local-commits-in-git) + +Add `--soft` to `git reset` to undo commit to staging area. + +### Undo staging to working directory + +```bash +# used after a git add +git restore --staged +git reset +``` + +## Authentication + +### With bearer token + +```bash +# https://learn.microsoft.com/en-us/azure/devops/integrate/get-started/authentication/service-principal-managed-identity?view=azure-devops#q-can-i-use-a-service-principal-to-do-git-operations-like-clone-a-repo +git -c http.extraheader="AUTHORIZATION: bearer $ServicePrincipalAadAccessToken" clone https://dev.azure.com/{yourOrgName}/{yourProjectName}/_git/{yourRepoName} +``` + +## Branch + +### Force local branch to the same with remote branch + +```bash +git reset --hard upstream/master +or +git checkout -B master origin/master # sometimes this one might not work +``` + +### get last commit of another local branch + +```bash +git cherry-pick another_local_branch +``` + +### get all commits of another local other_branch + +```bash +get rebase another_local_branch +``` + +## Show diff + +### show content in staging area + +```bash +git diff --cached +``` + +### show content in the last commit local repository + +```bash +git show +git show HEAD +``` + +### show content in the second last commit in local repository + +```bash +git show HEAD~ +git show HEAD~1 +``` + +## Disable host key checking + +Sometimes during CICD, we need to use git to do something, if the remote repository is accessed by SSH, the first time when you use git (git clone for example), you need to accept the remote host key. 
This might be a problem for CICD as it cannot type `Y` for you as you do in an interactive session. To let git to disable the host key checking or precisely accept automatically the remote host key, you need to add the following line in git config: + +```bash +> git config --global core.sshcommand 'ssh -i [YouPrivateKeyPath] -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -F /dev/null' +``` + +You may need to use `git config --system` to set the config at system level. + +## Proxy + +Usually, in an enterprise environment, we need to use a proxy to connect to the Internet resources. +And from Powershell, we can ask Powershell to [inherit the IE proxy settings](https://copdips.com/2018/05/setting-up-powershell-gallery-and-nuget-gallery-for-powershell.html#configure-proxy-in-powershell). + +With this proxy setting in Powershell, we should be able to use `git clone` to connect to the external www.github.com or to the internally hosted for example https://github.your_enterprise.local/ + +But trust me, some enterprises' proxy settings (often for those who use a [.pac](https://en.wikipedia.org/wiki/Proxy_auto-config) file) are so complicated that Powershell cannot use the proxy the same way as IE. + +In such case, fortunately, git has its own proxy setting. I think the [official doc](https://git-scm.com/docs/git-config) doesn't explain very well how to set the proxy. But this [gist](https://gist.github.com/evantoli/f8c23a37eb3558ab8765) gives some good examples. + +So, normally, you just need to set this config to ask git to use the `$internet_proxy` only for the url github.com, and all the other urls, git won't use the proxy. + +```bash +git config --global http.https://github.com.proxy $internet_proxy +``` + +## GUI + +GitForWindows ships with a GUI tool, very cool. 
+ +```bash +# start git gui tool +git gui +``` + +![git-gui](../../assets/blog_images/2019-06-19-git-cheat-sheet/git-gui.PNG) + +## Pull Requests with Rebase + +Azure devops doc: [https://devblogs.microsoft.com/devops/pull-requests-with-rebase/](https://devblogs.microsoft.com/devops/pull-requests-with-rebase/) + +## Moving Git repository content to another repository preserving history + +```bash +# https://stackoverflow.com/a/55907198/5095636 +# this keeps all commits history and git tags +$ git clone --bare https://github.com/exampleuser/old-repository.git +$ cd old-repository.git +$ git push --mirror https://github.com/exampleuser/new-repository.git +$ cd - +$ rm -rf old-repository.git +``` diff --git a/docs/posts/2019/2019-07-07-installing-readthedocs-on-ubuntu-1804.md b/docs/posts/2019/2019-07-07-installing-readthedocs-on-ubuntu-1804.md new file mode 100644 index 00000000..6e71ca20 --- /dev/null +++ b/docs/posts/2019/2019-07-07-installing-readthedocs-on-ubuntu-1804.md @@ -0,0 +1,51 @@ +--- +authors: +- copdips +categories: +- python +- ubuntu +comments: true +date: + created: 2019-07-07 +description: Not a step by step tutorial, just some tips and tricks. +draft: true +--- + +# Installing Readthedocs (RTD) On Ubuntu 1804 + +This is not a step by step tutorial to install Readthedocs, but just some tips and tricks. + + + +## Official doc + +The installation tutorial follows the [official doc](https://docs.readthedocs.io/en/stable/development/install.html). + +On the date of writing this post, the official doc is at python v3.7. + +## Installing python3 virtualenv + +```bash +sudo apt-get install python3-pip +sudo pip3 install virtualenv + +``` + +## Installing redis-server + +The official doc asks to install redis-sever, but default Ubuntu 1804 installation wont find the package + +```bash +sudo apt install redis-server + +Reading package lists... Done +Building dependency tree +Reading state information... 
Done +E: Unable to locate package redis-server +``` + +We need to enable the universal package by: + +```bash +sudo add-apt-repository universe +``` diff --git a/docs/posts/2019/2019-07-13-filtering-pandas-dataframe.md b/docs/posts/2019/2019-07-13-filtering-pandas-dataframe.md new file mode 100644 index 00000000..7432240d --- /dev/null +++ b/docs/posts/2019/2019-07-13-filtering-pandas-dataframe.md @@ -0,0 +1,272 @@ +--- +authors: +- copdips +categories: +- python +- pandas +comments: true +date: + created: 2019-07-13 +description: Filtering a pandas dataframe with series, query, or numpy methods. +--- + +# Filtering In Pandas Dataframe + +Pandas dataframe is like a small database, +we can use it to inject some data and do some in-memory filtering without any external SQL. +This post is much like a summary of this [StackOverflow thread](https://stackoverflow.com/questions/17071871/select-rows-from-a-dataframe-based-on-values-in-a-column-in-pandas). + + + +## Building dataframe + +```python +In [1]: import pandas as pd + ...: import numpy as np + ...: df = pd.DataFrame({'A': 'foo bar foo bar foo bar foo foo'.split(), + ...: 'B': 'one one two three two two one three'.split(), + ...: 'C': np.arange(8), 'D': np.arange(8) * 2}) + +In [2]: df +Out[2]: + A B C D +0 foo one 0 0 +1 bar one 1 2 +2 foo two 2 4 +3 bar three 3 6 +4 foo two 4 8 +5 bar two 5 10 +6 foo one 6 12 +7 foo three 7 14 +``` + +## Some basic filtering conditions + +### Filtering by A = 'foo' + +```python +In [3]: df[df.A == 'foo'] +Out[3]: + A B C D +0 foo one 0 0 +2 foo two 2 4 +4 foo two 4 8 +6 foo one 6 12 +7 foo three 7 14 +``` + +### Filtering by A = 'foo' and B = 'one' + +```python +In [4]: df[(df.A == 'foo') & (df.B == 'one')] +Out[4]: + A B C D +0 foo one 0 0 +6 foo one 6 12 +``` + +### Filtering by A = 'foo' or B = 'one' + +```python +In [5]: df[(df.A == 'foo') | (df.B == 'one')] +Out[5]: + A B C D +0 foo one 0 0 +1 bar one 1 2 +2 foo two 2 4 +4 foo two 4 8 +6 foo one 6 12 +7 foo three 7 14 +``` + 
+## Different ways to achieve the same filtering + +Let's take the example for filtering by `A = 'foo' and B = 'one'` + + + +### Column as dataframe property + +```python +In [4]: df[(df.A == 'foo') & (df.B == 'one')] +Out[4]: + A B C D +0 foo one 0 0 +6 foo one 6 12 +``` + +### Column as dataframe dict key + +```python +In [7]: df[(df['A'] == 'foo') & (df['B'] == 'one')] +Out[7]: + A B C D +0 foo one 0 0 +6 foo one 6 12 +``` + +### Using multiple single filters + +```python +In [16]: df[df.A == 'foo'][df.B == 'one'] +C:\Users\xiang\AppData\Local\PackageManagement\NuGet\Packages\python.3.7.0\tools\Scripts\ipython:1: UserWarning: Boolean Series key will be reindexed to match DataFrame index. +Out[16]: + A B C D +0 foo one 0 0 +6 foo one 6 12 +``` + +### Using numpy array + +```python +In [24]: df[(df.A.values == 'foo') & (df.B.values == 'one')] +Out[24]: + A B C D +0 foo one 0 0 +6 foo one 6 12 +``` + +### Using isin function + +```python +In [9]: df[( df['A'].isin(['foo']) ) & ( df['B'].isin(['one']) )] +Out[9]: + A B C D +0 foo one 0 0 +6 foo one 6 12 +``` + +### Using underlying numpy in1d function + +```python +In [25]: df[(np.in1d(df['A'].values, ['foo'])) & (np.in1d(df['B'].values, ['one']))] +Out[25]: + A B C D +0 foo one 0 0 +6 foo one 6 12 +``` + +### Using query API (developer friendly) + +```python +In [10]: df.query("(A == 'foo') & (B == 'one')") +Out[10]: + A B C D +0 foo one 0 0 +6 foo one 6 12 +``` + +### Using numpy where function and dataframe iloc positional indexing + +```python +In [20]: df.iloc[np.where( (df.A.values=='foo') & (df.B.values=='one') )] +Out[20]: + A B C D +0 foo one 0 0 +6 foo one 6 12 +``` + +### Using xs label indexing + +The Syntax is too complicated. + +## Developer friendly filtering + +As mentioned previously, the [query API method](https://copdips.com/2019/07/filtering-pandas-dataframe.html#using-query-api-developer-friendly) is a developer friendly filtering method. + +Why? 
All the other methods must include the original `df` object in the filter. If we have dynamic filter conditions, it will be difficult to generate the filters ([pandas Series](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.Series.html?highlight=series)) with the `df` object. I haven't found the solution to build this kind of filter by looping over a Python dict. + +For example: + +The filter is based on a Python dict, `the key of the dict` corresponds to the dataframe column, and `the value of the dict` corresponds to the dataframe column value to filter. One more constraint: if the value is None, don't filter on the corresponding key (column). + +Suppose the filter dict is like this one: + +```python +filter_dict = {'A': 'foo', 'B': 'one', 'C': None, 'D': None} +``` + +By using `df` object in the filter, we should see something like this: + +```python +df[(df['A'] == 'foo') & (df['B'] == 'one')] +``` + +It's easy to manually type the filter directly from a shell (ipython or jupyter as you like), but how do you build the same filter from a Python script? Not simple. + +Please let me know if you have any suggestions :) + + + +But with the query API, we just need to convert the `filter_dict` to a string like: `"(A == 'foo') & (B == 'one')"`. This is pretty easy in pure Python: + +```python +In [32]: filter_dict = {'A': 'foo', 'B': 'one', 'C': None, 'D': None} + +In [33]: filter_string = " & ".join(["{} == '{}'".format(k,v) for k,v in filter_dict.items() if v is not None]) + +In [34]: filter_string +Out[34]: "A == 'foo' & B == 'one'" +``` + +## Benchmark + +You can get the benchmark from this [StackOverflow thread](https://stackoverflow.com/a/46165056). + +Generally speaking, except for `the query API` and `the xs label indexing` methods, all the others are fast. + +!!! note + + But for a large quantity of data, the query API becomes pretty fast. 
+ +Some benchmarks I tested from my laptop: + +### For 8 lines of data + +```python +In [35]: import pandas as pd + ...: import numpy as np + ...: df = pd.DataFrame({'A': 'foo bar foo bar foo bar foo foo'.split(), + ...: 'B': 'one one two three two two one three'.split(), + ...: 'C': np.arange(8), 'D': np.arange(8) * 2}) + +In [36]: %timeit df.query("(A == 'foo') & (B == 'one')") +1.48 ms ± 35.1 µs per loop (mean ± std. dev. of 7 runs, 1000 loops each) + +In [37]: %timeit df[df.A == 'foo'][df.B == 'one'] +1.01 ms ± 33.7 µs per loop (mean ± std. dev. of 7 runs, 1000 loops each) + +In [38]: %timeit df[(df.A == 'foo') & (df.B == 'one')] +688 µs ± 48.3 µs per loop (mean ± std. dev. of 7 runs, 1000 loops each) + +In [39]: %timeit df[(df.A.values == 'foo') & (df.B.values == 'one')] +248 µs ± 15 µs per loop (mean ± std. dev. of 7 runs, 1000 loops each) + +In [40]: %timeit df.iloc[np.where( (df.A.values=='foo') & (df.B.values=='one') )] +287 µs ± 20.8 µs per loop (mean ± std. dev. of 7 runs, 1000 loops each) +``` + +### For 30k lines of data + +```python +In [51]: import pandas as pd + ...: import numpy as np + ...: df = pd.DataFrame({'A': ('foo bar ' * 15000).split(), + ...: 'B': ('one one two two three three ' * 5000).split(), + ...: 'C': np.arange(30000), 'D': np.arange(30000) * 2}) + +In [52]: %timeit df.query("(A == 'foo') & (B == 'one')") +2.83 ms ± 373 µs per loop (mean ± std. dev. of 7 runs, 100 loops each) + +In [53]: %timeit df[df.A == 'foo'][df.B == 'one'] +6.51 ms ± 230 µs per loop (mean ± std. dev. of 7 runs, 100 loops each) + +In [54]: %timeit df[(df.A == 'foo') & (df.B == 'one')] +5.58 ms ± 480 µs per loop (mean ± std. dev. of 7 runs, 100 loops each) + +In [55]: %timeit df[(df.A.values == 'foo') & (df.B.values == 'one')] +1.47 ms ± 58 µs per loop (mean ± std. dev. of 7 runs, 1000 loops each) + +In [56]: %timeit df.iloc[np.where( (df.A.values=='foo') & (df.B.values=='one') )] +1.5 ms ± 38.5 µs per loop (mean ± std. dev. 
of 7 runs, 1000 loops each) +``` diff --git a/docs/posts/2019/2019-07-21-learning-flask.md b/docs/posts/2019/2019-07-21-learning-flask.md new file mode 100644 index 00000000..090df89f --- /dev/null +++ b/docs/posts/2019/2019-07-21-learning-flask.md @@ -0,0 +1,106 @@ +--- +authors: +- copdips +categories: +- python +- flask +comments: true +date: + created: 2019-07-21 +description: Learning Flask. +draft: true +--- + +# Learning Flask + +Each [official Flask doc](https://flask.palletsprojects.com/en/master/) page worths to read. + + + +## Request variables + +### Variable rules + +Flask provides by default [some basic converters](https://flask.palletsprojects.com/en/master/quickstart/#variable-rules): + +- string (this is the default one and the string will be converted to an unicode string) (regex: '[^/]+') +- int +- float +- path (regex: '[^/].*?') +- uuid +- any (regex: '[^/]+') (same as string) + +An example by converting the input string to an uuid: + +```python +@app.route('/api/job_id/') +def job(job_id): + return str(type(job_id)) +``` + +Test with uuid in string as input: + +```powershell +6.2.2> curl.exe 127.0.0.1:5000/api/job/$(new-guid) + +``` + +Test with int in string as input, this should throw an error as int cannot be converted to an uuid: + +```powershell +6.2.2> curl.exe 127.0.0.1:5000/api/job/1 + +404 Not Found +

Not Found

+

The requested URL was not found on the server. If you entered the URL manually +please check your spelling and try again.

+``` + +We can define some custom converters, but it is not suggested. Keep things explicit and simple. + +## Debug + +### Auto start Flask in debug mode + +Flask can be started in [the debug mode](https://flask.palletsprojects.com/en/master/quickstart/#debug-mode) by setting a env var: `FLASK_DEBUG=1` or `FLASK_ENV=development` + +To prevent from setting this var every time, we can use the combination of: + +- the **.env** file: put `FLASK_DEBUG=1` or `FLASK_ENV=development` (case sensitive) inside. +- the **python-dotenv** module: `pip install python-dotenv`. + +In the end, just from your shell, type `python app.py` to start the Flask in debug mode, or open the `app.py` file in VSCode, then press `F4` to start it. + +!!! warning + + Don't use this setting in a production environment. + +## URL + +### Trailing slash + +- If the endpoint is to return a list of something, **add** the trailing slash (`/api/jobs/`). + > calling `/api/jobs` will [be redirected to](https://flask.palletsprojects.com/en/master/quickstart/#unique-urls-redirection-behavior) `/api/jobs/`. +- If the endpoint is to return a single element, **do not add** the trailing slash (`/api/job/`). + > calling `/api/job//` will throw a **404 Not Found** error. + +## Response + +We can sent the response by `jsonify()` function which creates a Flask Response object. + +```python +from flask import jsonify + +@app.route('/api/hello/') +def hello(name): + response = jsonify({'Hello': name}) + return response +``` + +Otherwise, Flask can try to parse some objects into Response object with following rules: + +- `str`: The data gets encoded as UTF-8 and used as the HTTP response body. +- `bytes/bytesarray`: Used as the body. +- `A (response, status, headers) tuple`: Where response can be a Response object or one of the previous types. **status is an integer value** that overwrites the response status, and headers is a mapping that extends the response headers. 
+- `A (response, status) tuple`: Like the previous one, but without specific headers +- `A (response, headers) tuple`: Like the preceding one, but with just extra headers diff --git a/docs/posts/2019/2019-07-30-troubleshooting-python-twine-cannot-upload-package-on-windows.md b/docs/posts/2019/2019-07-30-troubleshooting-python-twine-cannot-upload-package-on-windows.md new file mode 100644 index 00000000..d032bcc1 --- /dev/null +++ b/docs/posts/2019/2019-07-30-troubleshooting-python-twine-cannot-upload-package-on-windows.md @@ -0,0 +1,170 @@ +--- +authors: +- copdips +categories: +- python +- package +comments: true +date: + created: 2019-07-30 +description: Python twine uses ~/.pypirc as its default config file, but for some + reasons it doesn't work on Windows. +--- + +# Troubleshooting Python Twine Cannot Upload Package On Windows + +Python has several tools to upload packages to PyPi or some private Artifactory locations. The most widely used one is [twine](https://twine.readthedocs.io/en/latest/). Although twine is not a Python-native tool, it's officially [recommended by Python.org](https://packaging.python.org/tutorials/packaging-projects/#uploading-the-distribution-archives). + +## Building the package + +Just a quick recap on how to build the package. We need to create a file named [setup.py](https://docs.python.org/3.7/distutils/setupscript.html) at the root of the app. Use another file named [MANIFEST.IN](https://docs.python.org/3/distutils/sourcedist.html#specifying-the-files-to-distribute) to include the non-code files in the package. Don't forget to set [`include_package_data=True`](https://python-packaging.readthedocs.io/en/latest/non-code-files.html) in `setup.py`. + +**Wheel** + + + +> +A [Built Distribution](https://packaging.python.org/glossary/#term-built-distribution) format introduced by [PEP 427](https://www.python.org/dev/peps/pep-0427), which is intended to replace the [Egg](https://packaging.python.org/glossary/#term-egg) format. 
Wheel is currently supported by [pip](https://packaging.python.org/key_projects/#pip). + + + +Before the build, ensure that [`version`](https://packaging.python.org/guides/distributing-packages-using-setuptools/#choosing-a-versioning-scheme) key in `setup.py` is well defined. + +```powershell +# to build a python wheel package +# sdist will generate a .tar.gz file in dist/ +# bdist_wheel will generate a .whl file in dist/ +python setup.py sdist bdist_wheel +``` + +## Upload built package to PyPi or private Artifactory. + +We use twine to upload the Python packages. Before using it, we need to create a file name [`.pypirc` in `~/`](https://github.com/pypa/twine/blob/master/twine/utils.py#L57). + +There's [an example from jfrog for .pypirc](https://www.jfrog.com/confluence/display/RTF/PyPI+Repositories#PyPIRepositories-PublishingtoArtifactory). + +Then, we can upload the package by: + +```powershell +# -r dev, dev is a repo defined in the ~/.pypirc file. +6.2.0> twine upload dist/* -r dev --cert [path_of_artifactory_site_cert_bundle_full_chain_in_pem_format_it_seems_that_no_param_to_ignore_ssl_error_with_twine] +``` + +### .pypirc path error + +Unfortunately, on Windows OS, you might get following error message: + +```powershell +6.2.0> twine upload dist/* --cert [artifactory_site_cert_full_chain_in_pem_format] -r dev + +InvalidConfiguration: Missing 'dev' section from the configuration file or not a complete URL in --repository-url. +Maybe you have a out-dated '~/.pypirc' format? +more info: https://docs.python.org/distutils/packageindex.html#pypirc +``` + +This error is too generic, one of the reasons is because twine cannot find the file `~/.pypirc`, but if you check by `get-content ~/.pypirc`, it exits. + +The reason for this error is that if you're on Windows, and `$env:HOME` exists and doesn't point to the same location as `$env:USERPROFILE`. 
+ +twine uses `$env:HOME` as `~/` as per [os.path.expanduser()](https://github.com/pypa/twine/blob/579f3fe60f2333972ba0260f44033ee1889ca3ca/twine/utils.py#L70), but Windows powershell uses `$env:USERPROFILE` as `~/`. `$env:HOME` is not set by Windows by default. And Windows administrators often use `$env:HOME` to redirect the user roaming profile. + +### .pypirc path error reason + +1. Firstly, I set $env:HOME to a temp file, so it is differnet than $env:USERPROFILE + + ```powershell + # Initially $env:HOME doesn't exist + 6.2.0> Get-ChildItem env: | Out-String -st | Select-String 'userpro|home' + + ANDROID_SDK_HOME C:\Android + HOMEDRIVE C: + HOMEPATH \Users\xiang + USERPROFILE C:\Users\xiang + + 6.2.0> $env:HOME = 'c:/temp' + + # now, we have $env:HOME which is different than $env:USERPROFILE + 6.2.0> Get-ChildItem env: | Out-String -st | Select-String 'userpro|home' + + ANDROID_SDK_HOME C:\Android + HOME c:/temp + HOMEDRIVE C: + HOMEPATH \Users\xiang + USERPROFILE C:\Users\xiang + ``` + +1. Check ~/ in Python + ```python + In [1]: import os + + In [2]: os.path.expanduser('~/') + Out[2]: 'c:/temp/' + ``` + +2. Check ~/ in Powershell + ```powershell + 6.2.0> Resolve-Path ~/ + + Path + ---- + C:\Users\xiang + ``` + +So if we created the .pypirc file in `~/` in Powershell, twine won't find it. + +### Why os.path.expanduser() doesn't resolve the same ~/ as Powershell + +As shown previsouly, Windows Powershell resolves `~/` as `$env:USERPROFILE`. How about os.path.expanduser()? Let's check its source code by the `inspect` module. + +```python +In [1]: import os ; print(inspect.getsource(os.path.expanduser)) +def expanduser(path): + """Expand ~ and ~user constructs. 
+ + If user or $HOME is unknown, do nothing.""" + path = os.fspath(path) + if isinstance(path, bytes): + tilde = b'~' + else: + tilde = '~' + if not path.startswith(tilde): + return path + i, n = 1, len(path) + while i < n and path[i] not in _get_bothseps(path): + i += 1 + + if 'HOME' in os.environ: + userhome = os.environ['HOME'] + elif 'USERPROFILE' in os.environ: + userhome = os.environ['USERPROFILE'] + elif not 'HOMEPATH' in os.environ: + return path + else: + try: + drive = os.environ['HOMEDRIVE'] + except KeyError: + drive = '' + userhome = join(drive, os.environ['HOMEPATH']) + + if isinstance(path, bytes): + userhome = os.fsencode(userhome) + + if i != 1: #~user + userhome = join(dirname(userhome), path[1:i]) + + return userhome + path[i:] + +In [2]: +``` + +From the source code, obviously, if `$env:HOME` exists, expanduser() will return its value. If `$env:HOME` doesn't exists, it falls back to `$env:USERPROFILE`, if not again, it falls back to `$env:HOMEDRIVE/$env:HOMEPATH`. + +### Solutions + +We have 3 solutions. + +1. use [`twine --config-file`](https://twine.readthedocs.io/en/latest/#twine-upload) to manually specify the .pypirc config file. + +1. if `$env:HOME` exists, copy the `.pypirc` file to `$env:HOME`, otherwise to `$env:USERPROFILE`. + +1. declare all the upload params as [environment variables](https://twine.readthedocs.io/en/latest/#environment-variables). diff --git a/docs/posts/2019/2019-09-14-fast-tcp-port-check-in-powershell.md b/docs/posts/2019/2019-09-14-fast-tcp-port-check-in-powershell.md new file mode 100644 index 00000000..b07725ab --- /dev/null +++ b/docs/posts/2019/2019-09-14-fast-tcp-port-check-in-powershell.md @@ -0,0 +1,142 @@ +--- +authors: +- copdips +categories: +- powershell +- network +comments: true +date: + created: 2019-09-14 + updated: 2019-12-31 +description: Test-NetConnection is too slow if the remote port is not opened due to + its timeout setting. Use System.Net.Sockets.TcpClient instead. 
+--- + +# A fast way to check TCP port in Powershell + +The [Test-NetConnection](https://docs.microsoft.com/en-us/powershell/module/nettcpip/test-netconnection) cmdlet is great and verbose but too slow if the remote port to check is not opened. This is due to its timeout setting, which cannot be modified. In this post, I will show you a custom function that leverages the power of [System.Net.Sockets.TcpClient](https://docs.microsoft.com/dotnet/api/system.net.sockets.tcpclient) to accelerate the port test. + +!!! note + + **Update 2019-12-31**: I didn't mention `Test-Connection` previously because although it has the parameter `-TimeoutSeconds`, its output only has `True` or `False`. What a pity. But things are going to change, as per this [github issue](https://github.com/PowerShell/PowerShell/issues/11440), [@jackdcasey](https://github.com/jackdcasey) is preparing a pull request to make Test-Connection's output verbose enough. + +## Test-NetConnection is slow if the port is not opened + +If the port is opened, it's OK. + +```powershell +# if the port is opened +6.2.2> Measure-Command {Test-NetConnection www.google.fr -Port 80} | % TotalSeconds +0,2015152 +``` + +But if the port is not opened, it would be better to take a coffee to wait for the result. + +```powershell +# if the port is not opened +6.2.2> Measure-Command {Test-NetConnection www.google.fr -Port 123} | % TotalSeconds +WARNING: TCP connect to (2a00:1450:4007:805::2003 : 123) failed +WARNING: TCP connect to (172.217.18.195 : 123) failed +42,5026257 +``` + +In most cases, we only need to test a TCP port in a fast network (often LAN), waiting for 42 seconds is ridiculous, but unfortunately, Test-NetConnection doesn't provide a parameter to decrease the timeout. + +## System.Net.Sockets.TcpClient is fast + +*"Talk is cheap. 
Show me the code."* + + + +### Test-Port demos + +```powershell +# if the port is opened +6.2.2> Measure-Command {Test-Port www.google.fr 80} | % TotalSeconds +0,0648323 + +# if the port is not opened +6.2.2> Measure-Command {Test-Port www.google.fr 123} | % TotalSeconds +1,0072371 + +# it works with pipeline too +6.2.2> Measure-Command {"www.google.fr:80", "www.orange.fr:123", "www.free.fr" | Test-Port} | % TotalSeconds +2,0201628 + +# the output of the Test-Port, the default port to check is TCP 5985 +6.2.2> "www.google.fr:80", "www.orange.fr:123", "www.free.fr" | Test-Port | ft -a + +RemoteHostname RemotePort PortOpened TimeoutInMillisecond SourceHostname OriginalComputerName +-------------- ---------- ---------- -------------------- -------------- -------------------- +www.google.fr 80 True 1000 DELL-ZX www.google.fr:80 +www.orange.fr 123 False 1000 DELL-ZX www.orange.fr:123 +www.free.fr 5985 False 1000 DELL-ZX www.free.fr +``` + +### Test-Port source code + +The code is still in POC, there're still many parts to improve. For example, validating the given $ComputerName by resolving its IP, and error handling, etc. + +```powershell +function Test-Port { + [CmdletBinding()] + param ( + [Parameter(ValueFromPipeline = $true, HelpMessage = 'Could be suffixed by :Port')] + [String[]]$ComputerName, + + [Parameter(HelpMessage = 'Will be ignored if the port is given in the param ComputerName')] + [Int]$Port = 5985, + + [Parameter(HelpMessage = 'Timeout in millisecond. 
Increase the value if you want to test Internet resources.')] + [Int]$Timeout = 1000 + ) + + begin { + $result = [System.Collections.ArrayList]::new() + } + + process { + foreach ($originalComputerName in $ComputerName) { + $remoteInfo = $originalComputerName.Split(":") + if ($remoteInfo.count -eq 1) { + # In case $ComputerName in the form of 'host' + $remoteHostname = $originalComputerName + $remotePort = $Port + } elseif ($remoteInfo.count -eq 2) { + # In case $ComputerName in the form of 'host:port', + # we often get host and port to check in this form. + $remoteHostname = $remoteInfo[0] + $remotePort = $remoteInfo[1] + } else { + $msg = "Got unknown format for the parameter ComputerName: " ` + + "[$originalComputerName]. " ` + + "The allowed formats is [hostname] or [hostname:port]." + Write-Error $msg + return + } + + $tcpClient = New-Object System.Net.Sockets.TcpClient + $portOpened = $tcpClient.ConnectAsync($remoteHostname, $remotePort).Wait($Timeout) + + $null = $result.Add([PSCustomObject]@{ + RemoteHostname = $remoteHostname + RemotePort = $remotePort + PortOpened = $portOpened + TimeoutInMillisecond = $Timeout + SourceHostname = $env:COMPUTERNAME + OriginalComputerName = $originalComputerName + }) + } + } + + end { + return $result + } +} +``` + +### Test-Port in parallel + +Although the timeout in Test-Port is 1000 milliseconds, if we have 100 hosts to check and if all the ports are not opened, Test-Port will be slow too, because it runs the check in serial. + +I don't prefer to implement the parallel inside Test-Port, as we have already some pure powershell parallel solutions by using the [RunspacePool](https://docs.microsoft.com/en-us/dotnet/api/system.management.automation.runspaces.runspacepool) ([PoshRSJob](https://github.com/proxb/PoshRSJob), [Invoke-Parallel](https://github.com/RamblingCookieMonster/PowerShell/blob/master/Invoke-Parallel.ps1), etc.). 
And Microsoft is releasing its home-born parallel mechanism `ForEach-Object -Parallel` for Powershell 7. diff --git a/docs/posts/2019/2019-09-25-sqlalchemy-mixin-in-method.md b/docs/posts/2019/2019-09-25-sqlalchemy-mixin-in-method.md new file mode 100644 index 00000000..94e6e809 --- /dev/null +++ b/docs/posts/2019/2019-09-25-sqlalchemy-mixin-in-method.md @@ -0,0 +1,58 @@ +--- +authors: +- copdips +categories: +- python +- sqlalchemy +comments: true +date: + created: 2019-09-25 +description: Share common methods across SQLAlchemy db model classes by using mixin. +--- + +# SQLAlchemy mixin in method + +If I'm not wrong, the [SQLAlchemy official doc](https://docs.sqlalchemy.org/en/latest/orm/extensions/declarative/mixins.html) provides some examples to explain how to share a set of common columns, some common table options, or other mapped properties, across many classes. But I cannot find how to share common methods (e.g. your customized to_dict() method). This post will just show you a POC to achieve this goal by using [Python Mixin](https://realpython.com/inheritance-composition-python/). 
+ +## Share the common method to_dict() across two SQLAlchemy models + +```python +from sqlalchemy import Column, Integer, String +from sqlalchemy.ext.declarative import declarative_base + +Base = declarative_base() + + +class ModelMixin(object): + + def to_dict(self): + return {c.name: getattr(self, c.name) for c in self.__table__.columns} + + +class ModelA(Base, ModelMixin): + __tablename__ = "model_a" + + model_id = Column(Integer, primary_key=True) + name = Column(String) + + +class ModelB(Base, ModelMixin): + __tablename__ = "model_b" + + model_id = Column(Integer, primary_key=True) + name = Column(String) +``` + +Test: + +```python +# to_dict() method from ModelMixin is shared between ModelA and ModelB + +>>> a = ModelA(model_id=11, name='a1') +>>> a.to_dict() +{'model_id': 11, 'name': 'a1'} + +>>> b = ModelB(model_id=22, name='b1') +>>> b.to_dict() +{'model_id': 22, 'name': 'b1'} +``` diff --git a/docs/posts/2019/2019-10-27-installing-python3-on-ubuntu.md b/docs/posts/2019/2019-10-27-installing-python3-on-ubuntu.md new file mode 100644 index 00000000..2762aed1 --- /dev/null +++ b/docs/posts/2019/2019-10-27-installing-python3-on-ubuntu.md @@ -0,0 +1,125 @@ +--- +authors: +- copdips +categories: +- python +- ubuntu +comments: true +date: + created: 2019-10-27 + updated: 2021-03-17 +description: Install Python3 on Ubuntu by using official source. +--- + +# Install Python3 on Ubuntu + +Most of tutorials on the Internet about installing Python3.6 on Ubuntu are by [using 3rd party PPA repositories](http://ubuntuhandbook.org/index.php/2017/07/install-python-3-6-1-in-ubuntu-16-04-lts/). If for any reason, you cannot use them, hereunder a quick tutorial for installing it from the Python official source, you should in advance download the source to the Ubuntu. + +## Installing Python3.6 on Ubuntu 16.04 + +### Disabling IPv6 + +IPv6 is enabled by default on Ubuntu 16.04, in some cases, your Ubuntu network connection might be very low due to IPv6. 
Use `ip a | grep inet6` to check if IPv6 is enabled. + +Ref: [How to disable ipv6 address on ubuntu 18 04 bionic beaver linux](https://linuxconfig.org/how-to-disable-ipv6-address-on-ubuntu-18-04-bionic-beaver-linux) + +To disable IPv6 in a persist way, add following 2 lines in the file `/etc/sysctl.conf` and [reload the sysctl by `sudo sysctl --system`](https://www.cyberciti.biz/faq/reload-sysctl-conf-on-linux-using-sysctl/) or reboot the server: + +``` +net.ipv6.conf.all.disable_ipv6=1 +net.ipv6.conf.default.disable_ipv6=1 +``` + +### Installing build packages + +```bash +sudo apt install -y build-essential zlib1g-dev libssl-dev +``` + +!!! note + + without `libssl-dev` package, pip install will throw TLS/SSL error. + +!!! note + + From this point of view, installing Python on Windows by Scoop is much more pleasant :) + +### Installing Python3.6 from official source + +The latest Python3.6 version at the time of this writing is 3.6.9. + +```bash +# You may download the Python source to a local shared location (S3 or Artifactory, etc.) if you need to deploy Python to many servers. +wget https://www.python.org/ftp/python/3.6.9/Python-3.6.9.tgz +tar xzvf Python-3.6.9.tgz +cd Python-3.6.9 +sudo ./configure --prefix=/opt/python3.6 +make -j $(nproc) +sudo make install +sudo ln -s /opt/python3.6/bin/python3.6 /usr/bin/python3.6 +``` + +!!! warning + + Python3.5 is preinstalled by default on Ubuntu 16.04, `python3 -V` gives `Python 3.5.2`, many system tools rely on it, please **DO NOT** bind python3 to any versions other than Python3.5, otherwise your system might have unexpected problems. + +!!! 
note + + For a general Python installation not only for this Python3.6, if you have `gcc v8+`, you can add the flag `--enable-optimizations` to `./configure` to gain an extra runtime speed, otherwise you might encounter `Could not import runpy module` error + +### Using Python3.6 pip + +```bash +python3.6 -m pip install [a python module] +``` + +### Prevent pip install without an active venv + +```bash +echo 'export PIP_REQUIRE_VIRTUALENV=true' >> ~/.bashrc +``` + +## Installing Python3.7 on Ubuntu 16.04 + +Just tested installing Python3.7.5 with the same procedure, all works. + +## Installing Python3.10.10 with sqlite3 on Ubuntu 20.04 in WSL + +```bash +# install build packages +sudo apt update +sudo apt install -y build-essential zlib1g-dev libssl-dev libffi-dev + +# install sqlite3 from source, if you need a specific sqlite3 version in Python, you must install it before compiling Python, because the compilation needs the lib libsqlite3.so +mkdir ~/src +cd ~/src/ +wget https://www.sqlite.org/2021/sqlite-autoconf-3400100.tar.gz +tar xvf sqlite-autoconf-3400100.tar.gz +cd sqlite-autoconf-3400100/ +./configure --prefix=/usr/local +make -j $(nproc) +sudo make install +make clean +ll /usr/local/bin/sqlite* +ll /usr/local/lib/*sqlite* + +# let below Python compilation to use the newly installed sqlite3 lib +export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH + +# install python3.10.10 from source +cd ~/src/ +wget https://www.python.org/ftp/python/3.10.10/Python-3.10.10.tgz +tar xvf Python-3.10.10.tgz +cd Python-3.10.10/ + +# ubuntu 20.04 has gcc v9, so you can add the flag --enable-optimizations to ./configure +# --with-bz2 is for pandas, otherwise modulenotfounderror: no module named '_bz2' pandas +./configure --prefix=$HOME/opt/python3.10 --with-bz2 +make -j $(nproc) +sudo make install +make clean +sudo ln -s ~/opt/python3.10/bin/python3.10 /usr/bin/python3.10 +ll $(which python3.10) +echo -e '\nexport PIP_REQUIRE_VIRTUALENV=true' >> ~/.bashrc +python3.10 -c 
'import sqlite3 ; print(sqlite3.sqlite_version)' +``` diff --git a/docs/posts/2019/2019-12-21-elastic-painless-scripted-field-on-null-or-mssing-value.md b/docs/posts/2019/2019-12-21-elastic-painless-scripted-field-on-null-or-mssing-value.md new file mode 100644 index 00000000..b03e26ba --- /dev/null +++ b/docs/posts/2019/2019-12-21-elastic-painless-scripted-field-on-null-or-mssing-value.md @@ -0,0 +1,49 @@ +--- +authors: +- copdips +categories: +- elastic +comments: true +date: + created: 2019-12-21 +description: How to use painless scripted field to working on objects which might + be null or missing in some documents. +--- + +# Elastic Painless Scripted Field On Null/Missing Value + +This post shows how to use elastic painless language in scripted field to work on documents' keys which might not exist in some documents. + +## Parsing analyzed field in Painless + +Suppose we have following 2 documents in elastic: + +```json +[{ + "kye1": "value1", + "key2": { + "key22": "value22" + } +}, { + "key1": "valuex" +}] +``` + +The key `key22` in the first document can be accessed by `doc['key2.key22'].value`. If we use this script in the scripted field, we will see a null value for all the documents. This is because the second document doesn't have the key `key22`, painless language will throw an error. This [github issue](https://github.com/elastic/elasticsearch/issues/33816) is discussing how to return a default value if it is missing. + +To workaround this, I found a solution from this [github issue](https://github.com/elastic/elasticsearch/issues/24125#issuecomment-375874356). We should check the null value each time. + +The script should be: + +```json +(params._source.key2 == null) ? '' : ( (params._source.key2.key22 == null) ? '' : (params._source.key2.key22) )) +``` + +!!! warning + + Parsing documents by `params._source` is [very slow](https://findingdata.rocks/elasticsearch-scripting-understanding-the-difference-between-doc-and-params/). 
It's not cached, and is calculated in real-time each time. + +!!! warning + + The fields calculated by the scripted field is not searchable. + diff --git a/docs/posts/2019/2019-12-29-Using-Powershell-to-retrieve-latest-package-url-from-github-releases.md b/docs/posts/2019/2019-12-29-Using-Powershell-to-retrieve-latest-package-url-from-github-releases.md new file mode 100644 index 00000000..ab1fc742 --- /dev/null +++ b/docs/posts/2019/2019-12-29-Using-Powershell-to-retrieve-latest-package-url-from-github-releases.md @@ -0,0 +1,51 @@ +--- +authors: +- copdips +categories: +- powershell +comments: true +date: + created: 2019-12-29 +description: Github can host package releases, I will show you how to use Powershell + to retrieve the latest release download url. +--- + +# Using Powershell To Retrieve Latest Package Url From Github Releases + +Github can host package releases, I will show you how to use Powershell to retrieve the latest release download url. + + + +## Download latest Powershell release for Windows x64 zip version + +The goal of this demo is to convert the static url: + +- [https://github.com/PowerShell/PowerShell/releases/latest](https://github.com/PowerShell/PowerShell/releases/latest) + +to the real download url (latest version on 2019/12/29): + +- [https://github.com/PowerShell/PowerShell/releases/download/v6.2.3/PowerShell-6.2.3-win-x64.zip](https://github.com/PowerShell/PowerShell/releases/download/v6.2.3/PowerShell-6.2.3-win-x64.zip) + +```powershell +> $url = 'https://github.com/PowerShell/PowerShell/releases/latest' +> $request = [System.Net.WebRequest]::Create($url) +> $response = $request.GetResponse() +> $realTagUrl = $response.ResponseUri.OriginalString +> $version = $realTagUrl.split('/')[-1].Trim('v') +> $version +6.2.3 +> $fileName = "PowerShell-$version-win-x64.zip" +> $realDownloadUrl = $realTagUrl.Replace('tag', 'download') + '/' + $fileName +> $realDownloadUrl 
+https://github.com/PowerShell/PowerShell/releases/download/v6.2.3/PowerShell-6.2.3-win-x64.zip +> Invoke-WebRequest -Uri $realDownloadUrl -OutFile $env:TEMP/$fileName +``` + +!!! note + + The same method can be applied to retrieve other urls on other sites. + +!!! note + + The powershell pre-release doesn't have a static url, so I cannot retrieve the latest [v7.0.0-rc.1](https://github.com/PowerShell/PowerShell/releases/tag/v7.0.0-rc.1) download url. + diff --git a/docs/posts/2019/2019-12-29-Using-Scoop-On-Windows.md b/docs/posts/2019/2019-12-29-Using-Scoop-On-Windows.md new file mode 100644 index 00000000..3066c99a --- /dev/null +++ b/docs/posts/2019/2019-12-29-Using-Scoop-On-Windows.md @@ -0,0 +1,121 @@ +--- +authors: +- copdips +categories: +- powershell +comments: true +date: + created: 2019-12-29 + updated: 2020-01-11 +description: Some tips to use Scoop. +--- + +# Using Scoop On Windows + +I've been using [Scoop](https://github.com/lukesampson/scoop) for setting up my personal and professional Windows development desktops since nearly 2 years. + + + +For me, it's much more useful than another famous Windows package management tool [Chocolatey](https://github.com/lukesampson/scoop/wiki/Chocolatey-Comparison), because with Scoop, everything is run & installed without any administrator privileges. +This is very important in an enterprise environment, that all the enterprise Windows administrators are trying their best to prevent you from installing anything on Windows. This post will share my ways to use it **especially in such an enterprise environment**. BTW, Scoop is completely a Powershell open source project and free for use. + +## Using external 7Zip + +7Zip is a prerequisite for Scoop which is used for decompress many tools (git, conemu, etc.). +By default, Scoop will download 7Zip from its official website [https://7-zip.org/a/7z1900-x64.msi](https://github.com/ScoopInstaller/Main/blob/master/bucket/7zip.json#L11). 
+Unfortunately, this website is probably excluded by some enterprises' security gateway/tool. + +But, fortunately, 7Zip is often already installed by enterprises' deployment tool by default. + +So, in order to let Scoop use this external 7Zip pre-installed by the enterprise admin rather than `$env:SCOOP\apps\7zip`, we need to set the following config: + +```powershell +scoop config '7ZIPEXTRACT_USE_EXTERNAL' $true +``` + +This tip is not documented yet in the [Scoop Wiki](https://github.com/lukesampson/scoop/wiki). + +BTW: Maybe copying the 7Zip files manually to `$env:SCOOP\apps\7zip` will work too, but I haven't tested it yet. + +## Scoop TLS/SSL support + +Scoop uses the following methods to support different TLS/SSL versions: + +Previously: + +```powershell +# https://github.com/lukesampson/scoop/issues/2040#issuecomment-368298352 + +function set_https_protocols($protocols) { + try { + [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType] $protocols + } catch { + [System.Net.ServicePointManager]::SecurityProtocol = "Tls,Tls11,Tls12" + } +} + +function use_any_https_protocol() { + $original = "$([System.Net.ServicePointManager]::SecurityProtocol)" + $available = [string]::join(', ', [Enum]::GetNames([System.Net.SecurityProtocolType])) + + # use whatever protocols are available that the server supports + set_https_protocols $available + + return $original +} + +function do_dl($url, $to, $cookies) { + $original_protocols = use_any_https_protocol + $progress = [console]::isoutputredirected -eq $false + + try { + $url = handle_special_urls $url + dl $url $to $cookies $progress + } catch { + $e = $_.exception + if($e.innerexception) { $e = $e.innerexception } + throw $e + } finally { + set_https_protocols $original_protocols + } +} +``` + +Now: + +```powershell +# https://github.com/lukesampson/scoop/blob/48bb96a3d80ed722317a88afbae126c40ee205e8/lib/core.ps1#L1 + +function Optimize-SecurityProtocol { + # .NET Framework 4.7+ has a default security 
protocol called 'SystemDefault', + # which allows the operating system to choose the best protocol to use. + # If SecurityProtocolType contains 'SystemDefault' (means .NET4.7+ detected) + # and the value of SecurityProtocol is 'SystemDefault', just do nothing on SecurityProtocol, + # 'SystemDefault' will use TLS 1.2 if the webrequest requires. + $isNewerNetFramework = ([System.Enum]::GetNames([System.Net.SecurityProtocolType]) -contains 'SystemDefault') + $isSystemDefault = ([System.Net.ServicePointManager]::SecurityProtocol.Equals([System.Net.SecurityProtocolType]::SystemDefault)) + + # If not, change it to support TLS 1.2 + if (!($isNewerNetFramework -and $isSystemDefault)) { + # Set to TLS 1.2 (3072), then TLS 1.1 (768), and TLS 1.0 (192). Ssl3 has been superseded, + # https://docs.microsoft.com/en-us/dotnet/api/system.net.securityprotocoltype?view=netframework-4.5 + [System.Net.ServicePointManager]::SecurityProtocol = 3072 -bor 768 -bor 192 + } +} +``` + +We can reuse it elsewhere. + +## Scoop aria2 skip certificate check + +To use aria2 within Scoop to download packages in multithreading: + +```powershell +scoop config aria2-enabled true +``` + +But aria2 by default checks the certificate, to skip the check, use [aria2-options](https://github.com/lukesampson/scoop/pull/3780): + +```powershell +scoop config aria2-options @('--check-certificate=false') +``` diff --git a/docs/posts/2020/2020-02-01-setting-up-wsl.md b/docs/posts/2020/2020-02-01-setting-up-wsl.md new file mode 100644 index 00000000..89963e90 --- /dev/null +++ b/docs/posts/2020/2020-02-01-setting-up-wsl.md @@ -0,0 +1,43 @@ +--- +authors: +- copdips +categories: +- linux +comments: true +date: + created: 2020-02-01 +description: Setting up WSL (Windows Subsystem for Linux) +--- + +# Setting up WSL + +## Cleaning up manually the WSL instance + +For any reason you failed to install WSL from Microsoft store, you might need to clean up manually the downloaded WSL instance, the default location is at: 
`$env:LOCALAPPDATA\Packages` + +For example, Ubuntu v1804 is at: `C:\Users\xiang\AppData\Local\Packages\CanonicalGroupLimited.UbuntuonWindows_79rhkp1fndgsc\` + +Just delete the folder then reinstall it from Microsoft store. + +## Changing the default ls output directory color + +[https://github.com/microsoft/vscode/issues/7556](https://github.com/microsoft/vscode/issues/7556) + +[https://askubuntu.com/a/466203](https://askubuntu.com/a/466203) + +```bash +# add to ~/.bashrc +export LS_COLORS="ow=0;36;40" +``` +ow = (OTHER_WRITABLE) Directory that is other-writable (o+w) and not sticky + + + +## Installing Python3.7 on Ubuntu 1804 + +Installing Python3.7 will automatically prompt you to update libssl. + +```bash +sudo apt update +sudo apt install python3.7 python3.7-venv python3-venv +``` diff --git a/docs/posts/2020/2020-03-09-flattening-nested-dict-in-python.md b/docs/posts/2020/2020-03-09-flattening-nested-dict-in-python.md new file mode 100644 index 00000000..9d195f1c --- /dev/null +++ b/docs/posts/2020/2020-03-09-flattening-nested-dict-in-python.md @@ -0,0 +1,92 @@ +--- +authors: +- copdips +categories: +- python +comments: true +date: + created: 2020-03-09 +description: Flattening a nested dict/json with list as some keys' value. +--- + +# Flattening nested dict in Python + +## Problem + +Given a nested dict with list as some keys' value, we want to flatten the dict to a list. 
+ +For example, given a dict as like: + +```python +nested_data = { + "env": ["prd", "dev"], + "os": ["win", "unx"], + "msg": "ok" +} +``` + +we want to convert it to a list as like: + +```python +{'msg': 'ok', 'env': 'prd', 'os': 'win'} +{'msg': 'ok', 'env': 'prd', 'os': 'unx'} +{'msg': 'ok', 'env': 'dev', 'os': 'win'} +{'msg': 'ok', 'env': 'dev', 'os': 'unx'} +``` + +## Solution + +```python +from copy import deepcopy +import itertools + +nested_data = { + "env": ["prd", "dev"], + "os": ["win", "unx"], + "msg": "ok" +} + +base_data = {} +non_base_data = [] + +for k, v in nested_data.items(): + if isinstance(v, list): + non_base_data.append([{k: single_v} for single_v in v]) + else: + base_data.update({k: v}) + +print("base_data:", base_data) +print("non_base_data:", non_base_data) + +flatted_list = list(itertools.product(*tuple(non_base_data))) + +for l in flatted_list: + print(l) +print(len(flatted_list)) + + +flatted_data = [] +for one_combination in flatted_list: + line = deepcopy(base_data) + for column in one_combination: + line.update(column) + flatted_data.append(line) + +for l in flatted_data: + print(l) +print(len(flatted_data)) + + +# base_data: {'msg': 'ok'} +# non_base_data: [[{'env': 'prd'}, {'env': 'dev'}], [{'os': 'win'}, {'os': 'unx'}]] +# ({'env': 'prd'}, {'os': 'win'}) +# ({'env': 'prd'}, {'os': 'unx'}) +# ({'env': 'dev'}, {'os': 'win'}) +# ({'env': 'dev'}, {'os': 'unx'}) +# 4 +# {'msg': 'ok', 'env': 'prd', 'os': 'win'} +# {'msg': 'ok', 'env': 'prd', 'os': 'unx'} +# {'msg': 'ok', 'env': 'dev', 'os': 'win'} +# {'msg': 'ok', 'env': 'dev', 'os': 'unx'} +# 4 +``` diff --git a/docs/posts/2020/2020-04-13-fixing-ipython-on-Windows10-ConEmu-mouse-event-bug.md b/docs/posts/2020/2020-04-13-fixing-ipython-on-Windows10-ConEmu-mouse-event-bug.md new file mode 100644 index 00000000..d8ee0b68 --- /dev/null +++ b/docs/posts/2020/2020-04-13-fixing-ipython-on-Windows10-ConEmu-mouse-event-bug.md @@ -0,0 +1,143 @@ +--- +authors: +- copdips +categories: +- python 
+comments: true +date: + created: 2020-04-13 +description: 'This post is for manually fixing an ipython Windows ConEmu only bug + (from prompt_toolkit): Exception ''MouseEventType.MOUSE_DOWN'' is not a valid MouseEventType' +--- + +# Fixing an ipython Windows ConEmu only bug on 'MouseEventType.MOUSE_DOWN' + +## Problem + +Previously I updated the python version, the ipython version and maybe ConEmu on my Windows 10 (I don't remember which one exactly), I got an error when I wanted to copy some text from ipython repl in ConEmu console by the right mouse click: + +```python +ps.7.0.0 | py.3.8.2❯ ipython +Python 3.8.2 (tags/v3.8.2:7b3ab59, Feb 25 2020, 23:03:10) [MSC v.1916 64 bit (AMD64)] +Type 'copyright', 'credits' or 'license' for more information +IPython 7.13.0 -- An enhanced Interactive Python. Type '?' for help. + + +Unhandled exception in event loop: + File "d:\xiang\tools\scoop\apps\python\3.8.2\lib\asyncio\events.py", line 81, in _run + self._context.run(self._callback, *self._args) + File "d:\xiang\tools\scoop\apps\python\3.8.2\lib\site-packages\prompt_toolkit\input\win32.py", line 512, in ready + callback() + File "d:\xiang\tools\scoop\apps\python\3.8.2\lib\site-packages\prompt_toolkit\application\application.py", line 653, in read_from_input + self.key_processor.process_keys() + File "d:\xiang\tools\scoop\apps\python\3.8.2\lib\site-packages\prompt_toolkit\key_binding\key_processor.py", line 274, in process_keys + self._process_coroutine.send(key_press) + File "d:\xiang\tools\scoop\apps\python\3.8.2\lib\site-packages\prompt_toolkit\key_binding\key_processor.py", line 186, in _process + self._call_handler(matches[-1], key_sequence=buffer[:]) + File "d:\xiang\tools\scoop\apps\python\3.8.2\lib\site-packages\prompt_toolkit\key_binding\key_processor.py", line 329, in _call_handler + handler.call(event) + File "d:\xiang\tools\scoop\apps\python\3.8.2\lib\site-packages\prompt_toolkit\key_binding\key_bindings.py", line 101, in call + self.handler(event) + File 
"d:\xiang\tools\scoop\apps\python\3.8.2\lib\site-packages\prompt_toolkit\key_binding\bindings\mouse.py", line 128, in _mouse + event_type = MouseEventType(pieces[0]) + File "d:\xiang\tools\scoop\apps\python\3.8.2\lib\enum.py", line 304, in __call__ + return cls.__new__(cls, value) + File "d:\xiang\tools\scoop\apps\python\3.8.2\lib\enum.py", line 595, in __new__ + raise exc + File "d:\xiang\tools\scoop\apps\python\3.8.2\lib\enum.py", line 579, in __new__ + result = cls._missing_(value) + File "d:\xiang\tools\scoop\apps\python\3.8.2\lib\enum.py", line 608, in _missing_ + raise ValueError("%r is not a valid %s" % (value, cls.__name__)) + +Exception 'MouseEventType.MOUSE_DOWN' is not a valid MouseEventType +Press ENTER to continue... +``` + +## Root cause + +From the error stack, we can identify that it should be this line from the prompt_toolkit which throws the error: + +```python + File "d:\xiang\tools\scoop\apps\python\3.8.2\lib\site-packages\prompt_toolkit\key_binding\bindings\mouse.py", line 128, in _mouse + event_type = MouseEventType(pieces[0]) +``` + +And hereunder is the ipython and prompt_toolkit version installed on my Windows 10. + +```powershell +ps.7.0.0 | py.3.8.2❯ pip list | sls ipython, prompt + +ipython 7.13.0 +ipython-genutils 0.2.0 +prompt-toolkit 3.0.4 +``` + +Let's check the [source code](https://github.com/prompt-toolkit/python-prompt-toolkit/blob/dcc7adf0d5bcd0fd9db32ade713caaa56edbf757/prompt_toolkit/key_binding/bindings/mouse.py#L126) of the prompt_toolkit: + +```python +@key_bindings.add(Keys.WindowsMouseEvent) +def _mouse(event: E) -> None: + """ + Handling of mouse events for Windows. + """ + assert is_windows() # This key binding should only exist for Windows. + + # Parse data. 
+ pieces = event.data.split(";") + + event_type = MouseEventType(pieces[0]) +``` + +And let's add some simple debug code by using print(): + +```python +@key_bindings.add(Keys.WindowsMouseEvent) +def _mouse(event: E) -> None: + """ + Handling of mouse events for Windows. + """ + assert is_windows() # This key binding should only exist for Windows. + + # Parse data. + pieces = event.data.split(";") + + # start debug + for met in MouseEventType: + print("met:", met) + print("pieces[0]:", pieces[0]) + # end debug + + event_type = MouseEventType(pieces[0]) +``` + +Reproduce the error in ipython, I got the print info: + +```python +met: MouseEventType.MOUSE_UP +met: MouseEventType.MOUSE_DOWN +met: MouseEventType.SCROLL_UP +met: MouseEventType.SCROLL_DOWN +pieces[0]: MouseEventType.MOUSE_DOWN +``` + +Visually it seems that `pieces[0]` is in the MouseEventType, but as MouseEventType is an [Enum type](https://docs.python.org/3.8/library/enum.html), the correct syntax is that `pieces[0]` should not be prefixed by the enum class name `MouseEventType`, instead we can use the string format of the type, so called [programmatic access](https://docs.python.org/3.8/library/enum.html#programmatic-access-to-enumeration-members-and-their-attributes): `MouseEventType["MOUSE_DOWN"]` + +## Solution + +Adding a split on `pieces[0]` object can workaround the issue, but to fix it definitively, in fact, the author already fixed it a couple of weeks ago: + +[https://github.com/prompt-toolkit/python-prompt-toolkit/issues/1099](https://github.com/prompt-toolkit/python-prompt-toolkit/issues/1099) + +[https:/ +/pull/1105/commits/d2e7da3be5e46a5c8b432f67f78b662541b957de](https://github.com/prompt-toolkit/python-prompt-toolkit/pull/1105/commits/d2e7da3be5e46a5c8b432f67f78b662541b957de) + +```diff +# prompt_toolkit/input/win32.py +# On a key press, generate both the mouse down and up event. 
+for event_type in [MouseEventType.MOUSE_DOWN, MouseEventType.MOUSE_UP]: + data = ";".join( +- [str(event_type), str(ev.MousePosition.X), str(ev.MousePosition.Y)] ++ [event_type.value, str(ev.MousePosition.X), str(ev.MousePosition.Y)] + ) + result.append(KeyPress(Keys.WindowsMouseEvent, data)) +``` diff --git a/docs/posts/2020/2020-04-13-making-isort-compatible-with-black.md b/docs/posts/2020/2020-04-13-making-isort-compatible-with-black.md new file mode 100644 index 00000000..0d19313b --- /dev/null +++ b/docs/posts/2020/2020-04-13-making-isort-compatible-with-black.md @@ -0,0 +1,117 @@ +--- +authors: +- copdips +categories: +- python +- linter +- vscode +comments: true +date: + created: 2020-04-13 + updated: 2021-03-28 +description: Making isort compatible with black +--- + +# Making isort compatible with black + +!!! note + + Update 2020-12-06, thanks to [Christian Jauvin's comment](https://www.copdips.com/2020/04/making-isort-compatible-with-black.html#comment-5178374085), since isort v5, it has introduced `--profile=black` option, so the life is much easier now:) + +Both [isort](https://github.com/timothycrosley/isort) and [black](https://github.com/psf/black) are a must have in my python life, but with their default settings, I will get different imports formats. + + + +## multi_line_output, include_trailing_comma and line_length + +The main difference between isort and black are on there points: + +1. the multi line mode +2. the trailing comma of the last import +3. 
the max line length + +Personally, I prefer making isort compatible with black, so the settings to be used with isort is: `isort -m 3 -tc` + +As per isort settings [wiki](https://github.com/timothycrosley/isort/wiki/isort-Settings): + +- [`-m 3`](https://github.com/timothycrosley/isort#multi-line-output-modes) stands for multi line mode 3, which is `Vertical Hanging Indent` + + ```python + from third_party import ( + lib1, + lib2, + lib3, + lib4, + ) + ``` + +- `-tc` stands for adding a trailing comma for each import including the last one + +There's also a param `-w 88` to set the max line length to 88, but with multi line mode 3, we rarely need it. + +There's also a param `-rc` to recursively sort on all files in the project. + +We can also use isort custom profile to overwrite the default settings as shown [here](https://github.com/timothycrosley/isort#configuring-isort). And to use the custom profile in VSCode: +```json +# https://github.com/microsoft/vscode/issues/83586#issuecomment-557334564 +"python.sortImports.args": [ + "--settings-path=${workspaceFolder}/setup.cfg" +] +!!! note + + ``` + +## isort with VSCode + +isort v5-: + +[https://pycqa.github.io/isort/docs/configuration/profiles/](https://pycqa.github.io/isort/docs/configuration/profiles/) + +```json +{ + "editor.formatOnSave":true, + "python.sortImports.path": "isort", + "python.sortImports.args":[ + "-m 3", + "-tc", + ], + "[python]":{ + "editor.codeActionsOnSave":{ + # it was `"source.organizeImports": true` in my first version of this post, + # see below comment for explanation. 
+ "source.organizeImports.python": true + } + } +} +``` + +After some days of using above settings, I found a very frustrating behavior that when I pressed Ctrl+S multiple times to save manually a python file, the imports part changed upon each save, and sometimes it even [deleted some imports](https://github.com/microsoft/vscode/issues/83586#issuecomment-607497052)... +Digged in github, people have already reported the issue. See [issues/83586](https://github.com/microsoft/vscode/issues/83586), and [issues/9889](https://github.com/microsoft/vscode-python/issues/9889) +!!! warning + + The solution (workaround) is [here](https://github.com/microsoft/vscode/issues/90221#issuecomment-583664840). Replace `"source.organizeImports":true` by `source.organizeImports.python` to allow codeActionsOnSave to specify which extension to use for a given on save action, the way `editor.defaultFormatter` or `python.formatting.provider` work. + +## isort with git hook + +Just in case you're interested in git hook, the settings is [here](https://github.com/timothycrosley/isort#git-hook). + +Update 2021-03-28: [using git pre-commit](https://copdips.com/2021/01/python-lint-and-format.html#git-pre-commit). 
diff --git a/docs/posts/2020/2020-05-05-using-python-contextmanager-to-create-a-timer-decorator.md b/docs/posts/2020/2020-05-05-using-python-contextmanager-to-create-a-timer-decorator.md new file mode 100644 index 00000000..58ca205f --- /dev/null +++ b/docs/posts/2020/2020-05-05-using-python-contextmanager-to-create-a-timer-decorator.md @@ -0,0 +1,75 @@ +--- +authors: +- copdips +categories: +- python +comments: true +date: + created: 2020-05-05 +description: Using Python contextmanager to create a timer decorator +--- + +# Using Python Contextmanager To Create A Timer Decorator + +This [stackoverflow post](https://stackoverflow.com/a/30024601/5095636) has already given an example on how to use contextmanager to create a timer decorator: + +```python +from contextlib import contextmanager +from timeit import default_timer + +@contextmanager +def elapsed_timer(): + start = default_timer() + elapser = lambda: default_timer() - start + yield lambda: elapser() + end = default_timer() + elapser = lambda: end-start +``` + +It works well, but flake8 linter warns me that: `[E731]: do not assign a lambda expression, use a def.` + +So hereunder the lambda free version: + +```python +from contextlib import contextmanager +from timeit import default_timer + +@contextmanager +def elapsed_timer(): + start_time = default_timer() + + class _Timer(): + start = start_time + end = default_timer() + duration = end - start + + yield _Timer + + end_time = default_timer() + _Timer.end = end_time + _Timer.duration = end_time - start_time +``` + +Test: + +```python +In [67]: from time import sleep + ...: + ...: def sleep_1s(): + ...: sleep(1) + ...: + ...: with elapsed_timer() as t: + ...: sleep_1s() + ...: + +In [68]: t.start +Out[68]: 4583.4985535 + +In [69]: t.end +Out[69]: 4584.4983676 + +In [70]: t.duration +Out[70]: 0.9998141000005489 + +# the duration is less than 1s, it's default_timer of timeit. 
+``` diff --git a/docs/posts/2020/2020-06-08-compiling-sqlalchemy-query-to-nearly-real-raw-sql-query.md b/docs/posts/2020/2020-06-08-compiling-sqlalchemy-query-to-nearly-real-raw-sql-query.md new file mode 100644 index 00000000..93460596 --- /dev/null +++ b/docs/posts/2020/2020-06-08-compiling-sqlalchemy-query-to-nearly-real-raw-sql-query.md @@ -0,0 +1,185 @@ +--- +authors: +- copdips +categories: +- python +- sqlalchemy +comments: true +date: + created: 2020-06-08 + updated: 2020-06-08 +description: Compiling SQLAlchemy query to nearly real raw sql query +--- + +# Compiling SQLAlchemy query to nearly real raw sql query + +## Some useful links + +1. [https://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query](https://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query) +1. [https://docs.sqlalchemy.org/en/13/faq/sqlexpressions.html?highlight=literal_bind#rendering-bound-parameters-inline](https://docs.sqlalchemy.org/en/13/faq/sqlexpressions.html?highlight=literal_bind#rendering-bound-parameters-inline) +1. [https://docs.sqlalchemy.org/en/13/core/engines.html#configuring-logging](https://docs.sqlalchemy.org/en/13/core/engines.html#configuring-logging) + +## Query to compile + +Suppose we have a table called Movie, and a column release_date in the table Movie. + +```python +> from datetime import date + +> from sqlalchemy import create_engine, sessionmaker + +> engine = create_engine('sqlite:///moive_example.db') +> Session = sessionmaker(bind=engine) +> session = Session() + +> filter1 = Movie.release_date > date(2015, 1, 1) + +> filter1 + + +> str(filter1) +'movies.release_date > :release_date_1' + +> query1 = session.query(Movie).filter(Movie.release_date > date(2015, 1, 1)).limit(2) + +> query1 +> + +> str(query1) +'SELECT movies.id AS movies_id, movies.title AS movies_title, movies.release_date AS movies_release_date \nFROM movies \nWHERE movies.release_date > ?\n LIMIT ? OFFSET ?' 
+``` + +## Compiling to ORM sql query + +As per the method given by [Rendering Bound Parameters Inline](https://docs.sqlalchemy.org/en/13/faq/sqlexpressions.html?highlight=literal_bind#rendering-bound-parameters-inline): + +### Compiling filter1 to ORM sql query + +```python +> filter1.compile() + + +> str(filter1.compile()) +'movies.release_date > :release_date_1' + +> str(filter1.compile().params) +"{'release_date_1': datetime.date(2015, 1, 1)}" + +> filter1.compile(compile_kwargs={"literal_binds": True}) + + +> str(filter1.compile(compile_kwargs={"literal_binds": True})) +"movies.release_date > '2015-01-01'" +``` + +### Compiling query1 to ORM sql query + +```python +> str(query1.statement.compile()) +'SELECT movies.id, movies.title, movies.release_date \nFROM movies \nWHERE movies.release_date > :release_date_1\n LIMIT :param_1' + +> str(query1.statement.compile().params) +"{'release_date_1': datetime.date(2015, 1, 1), 'param_1': 2}" + +> str(query1.statement.compile(compile_kwargs={"literal_binds": True})) +"SELECT movies.id, movies.title, movies.release_date \nFROM movies \nWHERE movies.release_date > '2015-01-01'\n LIMIT 2" +``` + +!!! warning + + As given by the paragraph name, the above compiled query is not the real raw sql query sent to the database, it's an ORM one. But it's more or less enough for debugging or logging purpose. See below paragraph to get how to compile to real raw sql query. + +## Compiling to nearly real raw sql query + +!!! warning + + SQLAlchemy doesn't provide an out of the box function to compile a statement to the real raw sql query, and as per some issues' comments, it seems that the authors wouldn't like to implement it. There's no official way, this part is based on some solutions provided by the community. + +If you want to compile to real raw sql query, we should add the corresponding dialect, but be aware that it compiles only some simple types like `Integer`, `String`, etc. 
For complex types like `Date`, we need to [use `TypeDecorator` to tell SQLAlchemy how to literal render these complex types](https://stackoverflow.com/a/23835766/5095636). Using `TypeDecorator` means to modify your DB models, which is sometimes not a comfortable way. + +Below 2 examples (by [using engine or using dialect](https://docs.sqlalchemy.org/en/13/faq/sqlexpressions.html#stringifying-for-specific-databases) show the error message on Date type: + +```python +# using engine +> str(filter1.compile( + engine, + compile_kwargs={"literal_binds": True}, + )) +NotImplementedError: Don't know how to literal-quote value datetime.date(2015, 1, 1) +``` + +```python +# using dialect +> from sqlalchemy.dialects import postgresql +> str(query1.statement.compile( + compile_kwargs={"literal_binds": True}, + dialect=postgresql.dialect(), + )) +NotImplementedError: Don't know how to literal-quote value datetime.date(2015, 1, 1) +``` + +### render_query() + +Base on this [stackoverflow example](https://stackoverflow.com/a/32772915/5095636), I changed the param dialect to session, and removed the python2 part, hereunder the modified one: + +```python +from datetime import date, datetime, timedelta +from sqlalchemy.orm import Query + +def render_query(statement, db_session): + """ + Generate an SQL expression string with bound parameters rendered inline + for the given SQLAlchemy statement. + WARNING: This method of escaping is insecure, incomplete, and for debugging + purposes only. Executing SQL statements with inline-rendered user values is + extremely insecure. 
+ Based on http://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query + """ + if isinstance(statement, Query): + statement = statement.statement + dialect = db_session.bind.dialect + + class LiteralCompiler(dialect.statement_compiler): + def visit_bindparam( + self, bindparam, within_columns_clause=False, literal_binds=False, **kwargs + ): + return self.render_literal_value(bindparam.value, bindparam.type) + + def render_array_value(self, val, item_type): + if isinstance(val, list): + return "{}".format( + ",".join([self.render_array_value(x, item_type) for x in val]) + ) + return self.render_literal_value(val, item_type) + + def render_literal_value(self, value, type_): + if isinstance(value, int): + return str(value) + elif isinstance(value, (str, date, datetime, timedelta)): + return "'{}'".format(str(value).replace("'", "''")) + elif isinstance(value, list): + return "'{{{}}}'".format( + ",".join( + [self.render_array_value(x, type_.item_type) for x in value] + ) + ) + return super(LiteralCompiler, self).render_literal_value(value, type_) + + return LiteralCompiler(dialect, statement).process(statement) +``` + +### Using the render_query() + +The results in sqlite dialect: + +```python +> render_query(filter1, session) +"movies.release_date > '2015-01-01'" + +> render_query(query1, session) +"SELECT movies.id, movies.title, movies.release_date \nFROM movies \nWHERE movies.release_date > '2015-01-01'\n LIMIT 2 OFFSET 0" +``` + +!!! warning + + With `render_query()`, it renders the query with dialect syntax, but please be aware that the values rendered are the ones translated by `render_literal_value()`, which might not be the ones really passed to SQL database. That's also why I named this post as **nearly real raw sql query**. 
diff --git a/docs/posts/2020/2020-07-16-rolling-back-from-flask-restplus-reqparse-to-native-flask-request-to-parse-inputs.md b/docs/posts/2020/2020-07-16-rolling-back-from-flask-restplus-reqparse-to-native-flask-request-to-parse-inputs.md new file mode 100644 index 00000000..21397f09 --- /dev/null +++ b/docs/posts/2020/2020-07-16-rolling-back-from-flask-restplus-reqparse-to-native-flask-request-to-parse-inputs.md @@ -0,0 +1,71 @@ +--- +authors: +- copdips +categories: +- python +- flask +comments: true +date: + created: 2020-07-16 +description: flask-restplus (flask-restx) reqparse is deprecated, migration to native + flask request has some points to take care of +--- + +# Rolling back from flask-restplus reqparse to native flask request to parse inputs + +flask-restplus' (or flask-restx) [reqparse module is deprecated](https://flask-restx.readthedocs.io/en/latest/parsing.html), so I decided to use the native flask request object to parse the incoming inputs. + +After the try, I noticed some points to take care of. Before listing these points, I will show you how to use native flask request to parse the inputs. + +!!! note + + The flask-restplus official doc [suggests](https://flask-restx.readthedocs.io/en/latest/parsing.html) to use [marshmallow](https://marshmallow.readthedocs.io/en/stable/) to replace reqparse. + +## Parsing inputs with the native flask request + +The native [Flask Request](https://flask.palletsprojects.com/en/master/api/#flask.Request) object has many attributes. To parse the incoming inputs, we can mainly use: + +```python +from flask import request +request.args +request.json +request.data +request.form +request.headers +request.authorization +``` + +`request` is a [global object](https://flask.palletsprojects.com/en/master/api/#flask.request) always available in any active request contexts. + +## Point 1. 
Smart boolean type + +flask-restplus's boolean type is actually a [smart boolean type](https://github.com/python-restx/flask-restx/blob/a28f9c11566adbfe307cf6784905469e5cdaf543/flask_restx/inputs.py#L507), which can convert bool True, or string "True", "tRue", "1" etc., or int 1 to True, so as to False. This is very smart. + +```python +parser.add_argument('flag', type=inputs.boolean) +``` + +When I rolled back to using the flask.request, there's no such smartness, so be careful how the API parsed the inputs with flask-restplus previously. If it accepted for example the string 'false' as smart boolean, which will be converted to boolean `False` with flask-restplus, once migrated to the native flask.request.json, the string 'false' is considered as a boolean `True`. + +```python +>>> bool("false") +True +``` + +So maybe as a quick backward compatible workaround, we can reuse the smart boolean source code. + +## Point 2. Optional inputs + +flask-restplus can define an optional input like this: + +```python +parser.add_argument('name', required=False, help="Name cannot blank!") +``` + +If user doesn't provide `name` in the inputs, the reqparse will render it as `{"name": None}`, which means the optional input has `None` as its default value. + +But in the native flask.request.json, we won't see this input at all if it was not provided. So if the API backend must need the input `name`, we must add some protection. + +## Tests + +In the end, I would just like to suggest everyone to write as many tests as we can to cover all the use cases. 
diff --git a/docs/posts/2020/2020-11-24-my-powerline.md b/docs/posts/2020/2020-11-24-my-powerline.md new file mode 100644 index 00000000..0669a8ee --- /dev/null +++ b/docs/posts/2020/2020-11-24-my-powerline.md @@ -0,0 +1,153 @@ +--- +authors: +- copdips +categories: +- linux +- shell +comments: true +date: + created: 2020-11-24 +description: 'Just my way to setup and configure powerline in WSL ' +--- + +# My Powerline setup and configuration + +!!! warning + + If you're working in an enterprise environment, and you don't have the admin rights on your Windows desktop to install additional fonts, or your enterprise admin cannot do that, then I suggest you to **ignore this post**, powerline will be installed, but very ugly. If you have a Linux desktop, all will be OK, installing fonts doesn't need to be root. + +- [https://github.com/powerline/powerline](https://github.com/powerline/powerline) + +- [https://powerline.readthedocs.io/en/latest/configuration.html#quick-setup-guide](https://powerline.readthedocs.io/en/latest/configuration.html#quick-setup-guide) + +## Installing powerline-status from pip + +```bash +pip3 install powerline-status --user +pip3 show powerline-status +user_python_site_packages=$(pip3 show powerline-status | grep Location: | awk '{print $2}') +powerline_global_config_files_path="$user_python_site_packages/powerline/config_files" +mkdir -p ~/.config/powerline +cp -r $powerline_global_config_files_path/. 
~/.config/powerline +``` + +## Installing fonts + +https://powerline.readthedocs.io/en/latest/installation/linux.html#fonts-installation + +```bash +apt install fontconfig +mkdir -p ~/.local/share/fonts/ +mkdir -p ~/.config/fontconfig/conf.d/ +wget https://github.com/powerline/powerline/raw/develop/font/PowerlineSymbols.otf +wget https://github.com/powerline/powerline/raw/develop/font/10-powerline-symbols.conf +mv PowerlineSymbols.otf ~/.local/share/fonts/ +fc-cache -vf ~/.local/share/fonts/ +mv 10-powerline-symbols.conf ~/.config/fontconfig/conf.d/ +``` + +## Installing additional fonts + +[https://github.com/powerline/fonts#quick-installation](https://github.com/powerline/fonts#quick-installation) + +## Adding VIM support + +- [https://powerline.readthedocs.io/en/latest/usage.html#vim-plugin-requirements](https://powerline.readthedocs.io/en/latest/usage.html#vim-plugin-requirements) + +- [https://powerline.readthedocs.io/en/latest/usage/other.html#vim-statusline](https://powerline.readthedocs.io/en/latest/usage/other.html#vim-statusline) + +If Python support is absent then Vim needs to be compiled with it. To do this use `--enable-pythoninterp` `./configure` flag (Python 3 uses `--enable-python3interp` flag instead). Note that this also requires the related Python headers to be installed. Please consult distribution’s documentation for details on how to compile and install packages. 
+ +Check VIM with python support: + +```bash +vim --version | grep +python +``` + +if you don't have `+python` or `+python3`, you can install VIM from source by enable python support: [https://github.com/ycm-core/YouCompleteMe/wiki/Building-Vim-from-source](https://github.com/ycm-core/YouCompleteMe/wiki/Building-Vim-from-source) + +Add following lines to `$HOME/.vimrc`: + +```bash +python3 from powerline.vim import setup as powerline_setup +python3 powerline_setup() +python3 del powerline_setup +``` + +## Adding Ipython support + +[https://powerline.readthedocs.io/en/latest/usage/other.html#ipython-prompt](https://powerline.readthedocs.io/en/latest/usage/other.html#ipython-prompt) + +**Doesn't work for ipython v7+**: [https://github.com/powerline/powerline/issues/1953](https://github.com/powerline/powerline/issues/1953) + +## Adding PBD support + +[https://powerline.readthedocs.io/en/latest/usage/other.html#pdb-prompt](https://powerline.readthedocs.io/en/latest/usage/other.html#pdb-prompt) + +## Adding Bash support + +[https://powerline.readthedocs.io/en/latest/usage/shell-prompts.html#bash-prompt](https://powerline.readthedocs.io/en/latest/usage/shell-prompts.html#bash-prompt) + +Add following lines to `~/.bashrc`: + +python path must be available before `powerline-daemon -q` + +```bash +powerline-daemon -q +POWERLINE_BASH_CONTINUATION=1 +POWERLINE_BASH_SELECT=1 +. 
{repository_root}/powerline/bindings/bash/powerline.sh + + + +``` + +## Adding Git support + +[https://github.com/jaspernbrouwer/powerline-gitstatus](https://github.com/jaspernbrouwer/powerline-gitstatus) + +```bash +pip3 install powerline-gitstatus +``` + +Add to `~/.config/powerline/colorschemes/default.json`: + +```bash +{ + "groups": { + "gitstatus": { "fg": "gray8", "bg": "gray2", "attrs": [] }, + "gitstatus_branch": { "fg": "gray8", "bg": "gray2", "attrs": [] }, + "gitstatus_branch_clean": { "fg": "green", "bg": "gray2", "attrs": [] }, + "gitstatus_branch_dirty": { "fg": "gray8", "bg": "gray2", "attrs": [] }, + "gitstatus_branch_detached": { "fg": "mediumpurple", "bg": "gray2", "attrs": [] }, + "gitstatus_tag": { "fg": "darkcyan", "bg": "gray2", "attrs": [] }, + "gitstatus_behind": { "fg": "gray10", "bg": "gray2", "attrs": [] }, + "gitstatus_ahead": { "fg": "gray10", "bg": "gray2", "attrs": [] }, + "gitstatus_staged": { "fg": "green", "bg": "gray2", "attrs": [] }, + "gitstatus_unmerged": { "fg": "brightred", "bg": "gray2", "attrs": [] }, + "gitstatus_changed": { "fg": "mediumorange", "bg": "gray2", "attrs": [] }, + "gitstatus_untracked": { "fg": "brightestorange", "bg": "gray2", "attrs": [] }, + "gitstatus_stashed": { "fg": "darkblue", "bg": "gray2", "attrs": [] }, + "gitstatus:divider": { "fg": "gray8", "bg": "gray2", "attrs": [] } + } +} +``` + +Add to `~/.config/powerline/themes/shell/default.json`: + +```bash +{ + "function": "powerline_gitstatus.gitstatus", + "priority": 40 +} +``` + +Add to `~/.config/powerline/themes/shell/__main__.json`: + +```bash +"gitstatus": { + "args": { + "show_tag": "exact" + } +} +``` diff --git a/docs/posts/2021/2021-01-04-python-lint-and-format.md b/docs/posts/2021/2021-01-04-python-lint-and-format.md new file mode 100644 index 00000000..4a4c7842 --- /dev/null +++ b/docs/posts/2021/2021-01-04-python-lint-and-format.md @@ -0,0 +1,783 @@ +--- +authors: +- copdips +categories: +- python +- linter +comments: true +date: + 
created: 2021-01-04 + updated: 2023-06-10 +description: Some commands to lint and format Python files +--- + +# Python Lint And Format + +## Azure SDK Python Guidelines + +[https://azure.github.io/azure-sdk/python_implementation.html](https://azure.github.io/azure-sdk/python_implementation.html) + +## Lint + +Update 2023-05-21: Replaced flake8, pylint, and isort by [ruff](https://github.com/charliermarsh/ruff). When replacing pylint, should [add check by mypy](https://beta.ruff.rs/docs/faq/#how-does-ruff-compare-to-pylint). +Update 2023-11-07: Bandit could be replaced by ruff too with the support of flake-bandit. +!!! note + + The nearly only thing that ruff can not do for the moment is the [type checking](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-mypy-or-pyright-or-pyre). + +### ruff + +```bash +ruff . +ruff check . # check is the default command so can be ignore + +# show ignored ruff alerts +ruff . --ignore-noqa --exit-zero +``` + +### pylint + +!!! note + + Could be replaced by [ruff](https://github.com/charliermarsh/ruff). + +As pylint has too many options, it's recommended to use the pylint config file: + +```bash +# file ~/.pylintrc, can be generated by pylint --generate-rcfile + +[MASTER] + +[MESSAGES CONTROL] +disable= + C0116, # Missing function or method docstring (missing-function-docstring) + W1203, # Use lazy % formatting in logging functions (logging-fstring-interpolation) + +[format] +max-line-length = 88 + +[MISCELLANEOUS] +# List of note tags to take in consideration, separated by a comma. +notes=FIXME + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + spark +``` + +But we can also ignore some warnings directly in the pylint command: + +```bash +pylint . 
-j 0 --disable=C0116,W1203 +``` + +To show all the inline ignored pylint alerts: `pylint --enable=suppressed-message` + +#### Ignore Unused Argument given a Function Name Expression + +Use [dummy variable](https://pylint.pycqa.org/en/latest/user_guide/configuration/all-options.html#dummy-variables-rgx) to ignore the Pylint warning on [unused-argument](https://github.com/PyCQA/pylint/issues/1057). + +### flake8 + +!!! note + + Could be replaced by [ruff](https://github.com/charliermarsh/ruff). + +```bash +# ignore W503 because of black format. BTW, flake8 also has W504 which is in contrary to W503. +# ignore E501, line too long because we have the same check at Pylint side already. +flake8 . \ + --exclude=venv \ + --extend-ignore=E203,E501,W503, \ + --max-complexity=7 \ + --show-source \ + --statistics \ + --count \ + --jobs=auto + +flake8 [a_file_path] +``` + +To show all the inline ignored flake8 alerts: `flake8 --disable-noqa || true` + +There's a very nice flake8 plugin called [**flake8-cognitive-complexity**](https://github.com/Melevir/flake8-cognitive-complexity) which checks the [Cognitive Complexity](https://blog.sonarsource.com/cognitive-complexity-because-testability-understandability) in addition to the Cyclomatic Complexity provided by flake8 out of the box. We dont need to add extra parameter to use the Cognitive Complexity in flake8, it's set to `--max-cognitive-complexity=7` by default once the plugin is installed. By the way, Sonar sets the Cognitive Complexity threshold to 15 by default. + +To fix `imported but not used` error in `__init__.py` file, could by [__all__ attribute](https://stackoverflow.com/a/31079085/5095636) (the most elegant) or by [--per-file-ignores](https://flake8.pycqa.org/en/latest/user/options.html#cmdoption-flake8-per-file-ignores). + +### bandit + +!!! 
note + + Update 2023-11-07: [ruff](https://docs.astral.sh/ruff/rules/#flake8-bandit-s) supports flake-bandit, so we can use ruff instead of bandit now, vscode also supports to [switch to flakes-bandit](https://github.com/microsoft/vscode-python/wiki/Migration-to-Python-Tools-Extensions#alternatives-for-deprecated-settings--). + +The bandit config file format is not well documented, I passed a lot of time to test the config. + +```bash +$ cat .bandit +# https://github.com/PyCQA/bandit/issues/400 +exclude_dirs: + - "./venv/*" + +# https://github.com/PyCQA/bandit/pull/633 +assert_used: + skips: + - "*/*_test.py" + - "*/test_*.py" +``` + +```bash +# without specifying -c ./bandit, it doesn't work +$ bandit . -r -c ./.bandit +``` + +### ossaudit + +[ossaudit](https://github.com/illikainen/ossaudit) uses [Sonatype OSS Index](https://ossindex.sonatype.org/) to audit Python packages for known vulnerabilities. + +It can check installed packages and/or packages specified in dependency files. The following formats are supported with [dparse](https://github.com/pyupio/dparse): + +- PIP requirement files +- Pipfile +- Pipfile.lock +- tox.ini +- conda.yml + +```bash +# check installed packages and packages listed in two requirements files +$ ossaudit --installed --file requirements.txt --file requirements-dev.txt +Found 0 vulnerabilities in 214 packages +``` + +Github has already provided, free of charge, the [vulnerable dependencies alert](https://docs.github.com/en/code-security/supply-chain-security/managing-vulnerabilities-in-your-projects-dependencies/about-alerts-for-vulnerable-dependencies). + +### pyright + +faster than mypy. + +```bash +pyproject.toml: + +```toml +[tool.pyright] +reportUnnecessaryTypeIgnoreComment = true +include = [] +exclude = [] +``` + +running pyright: + +```bash + +## scan pathes specified in pyproject.toml include, exclude +pyright + +## scan current folder and subfolders in spite of pyproject.toml include, exclude +pyright . 
+``` + +### mypy + +For projects having sqlalchemy, we often install the `sqlalchemy-stubs` plugin as sqlalchemy uses some dynamic classes. + +And also django-stubs, pandas-stubs, types-setuptools, types-requests etc. + +[mypy config file](https://mypy.readthedocs.io/en/stable/config_file.html): + +```ini +[mypy] +ignore_missing_imports = True # We recommend using this approach only as a last resort: it's equivalent to adding a # type: ignore to all unresolved imports in your codebase. +plugins = sqlmypy # sqlalchemy-stubs +exclude = (?x)( + ^venv + | ^build + ) +``` + +running mypy: + +```bash +mypy . +mypy . --exclude [a regular expression that matches file path] +mypy . --exclude venv[//] # exclude venv folder under the root +``` + +!!! warning + + When using mypy, it would be better to use mypy against to [all files in the project](https://github.com/python/mypy/issues/13916), but ont some of them, + + + +### ignore lint error in one line + +| linter | ignore in one line | +| ---------------- | -------------------------------------------------------------------------------------- | +| ruff | (2 spaces)# noqa: {errorIdentifier} | +| pylint | (2 spaces)# pylint: disable={errorIdentifier} | +| flake8 | (2 spaces)# noqa: {errorIdentifier} | +| bandit | (2 spaces)# nosec | +| pyright | (2 spaces)# pyright: ignore [reportOptionalMemberAccess, reportGeneralTypeIssues] | +| mypy | (2 spaces)# type: ignore | +| multiple linters | (2 spaces)# type: ignore # noqa: {errorIdentifier} # pylint: disable={errorIdentifier} | + +To ignore Pylint within a code block + +```python + +## https://stackoverflow.com/a/48836605/5095636 +import sys +sys.path.append("xx/xx") + +## pylint: disable=wrong-import-position +from x import ( # noqa: E402 + a, + b, +) +from y import c # noqa: E402 + +## pylint: enable=wrong-import-position +``` + +## Format + +### isort + +!!! note + + Could be replaced by [ruff](https://github.com/charliermarsh/ruff). + + + +```bash +isort . 
--profile=black --virtual-env=venv --recursive --check-only +isort . --profile=black --virtual-env=venv --recursive +isort [a_file_path] +``` + +!!! warning + + Be very careful with isort, it's not uncompromising, especially for some codes that dynamically import some modules inside a function instead of from the beginning of a file. People use often this to avoid circular import problem. Always run the tests after the isort. + + + +### black + +```bash +black . --check +black . +black [a_file_path] +``` + +Using black with other tools: [https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html](https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html) + +## VSCode + +Just my 2 cents, try the [errorlens](https://marketplace.visualstudio.com/items?itemName=usernamehw.errorlens) extension in VSCode, it will lint all the warnings/errors on live when coding, it's really cool. + +And don't forget to install the official [SonarLint](https://marketplace.visualstudio.com/items?itemName=SonarSource.sonarlint-vscode) extension, it will give you extra lint. It eats a lot of memory with its java processes nevertheless. 
+ +```json + "python.formatting.provider": "none", + "[python]": { + "editor.defaultFormatter": "ms-python.black-formatter", + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + // "source.organizeImports": true + }, + }, + "python.linting.banditEnabled": true, + "python.linting.banditArgs": [ + "-r", + "-c", + "~/pyproject.toml" + ], + "python.linting.ignorePatterns": [ + ".vscode/*.py", + "**/site-packages/**/*.py", + "venv/" + ], + "python.linting.mypyEnabled": true, + "python.linting.mypyArgs": [ + "--follow-imports=silent", + "--ignore-missing-imports", + "--show-column-numbers", + "--no-pretty", + "--warn-return-any", + "--warn-unused-configs", + "--show-error-codes" + ], + "sonarlint.connectedMode.connections.sonarqube": [ + { + "serverUrl": "https://sonar.xxx", + "connectionId": "sonar.xxx" + } + ], + "[json]": { + "editor.defaultFormatter": "esbenp.prettier-vscode", + // "editor.defaultFormatter": "esbenp.prettier-vscode", + "editor.formatOnSave": true + }, + "[jsonc]": { + "editor.defaultFormatter": "vscode.json-language-features" + }, +``` + +## pyproject.toml + +`pyproject.toml` is the new standard in Python introduced by [PEP 518](https://peps.python.org/pep-0518/) (2016) for build system requirements, [PEP 621](https://peps.python.org/pep-0621/) (2020) for project metadata, and [PEP 660](https://peps.python.org/pep-0660/) (2021) for wheel based editable installs. + +It's fun to know [why Python authority chose this name](https://peps.python.org/pep-0518/#other-file-names), and very interesting to understand their POV of [different file formats](https://peps.python.org/pep-0518/#other-file-formats) :smile:. + +All the the major tools (setuptools, pip-tools, poetry) support this new standard, and the repo [awesome-pyproject](https://github.com/carlosperate/awesome-pyproject/) maintains a list of Python tools which are compatible to `pyproject.toml`. + +!!! 
warning + + We cannot officially [declare flake8 config in pyproject.toml](https://github.com/PyCQA/flake8/issues/234). + + + +Hereunder an example of its content for the lint part. + +```toml +[tool.ruff] +fix = true +show-fixes = true +select = [ + "ALL", + # "E", # pycodestyle errors + # "W", # pycodestyle warnings + # "F", # pyflakes + # "I", # isort + # "C", # flake8-comprehensions + # "B", # flake8-bugbear +] +ignore = [ + # https://beta.ruff.rs/docs/rules/ + "D", # pydocstyle + "E501", # line too long, handled by black + "B008", # do not perform function calls in argument defaults + "ANN", # flake8-annotations + # "C901", # too complex + # "PTH123", # pathlib-open - this would force pathlib usage anytime open or with open was used. +] + +[tool.ruff.isort] + +## Combine multiple `from foo import bar as baz` statements with the same source + +## (`foo`) into a single statement. +combine-as-imports = true + +## Imports of the form `from foo import bar as baz` show one `import bar as baz` + +## per line. Useful for __init__.py files that just re-export symbols. +force-wrap-aliases = true + +[tool.ruff.per-file-ignores] + +## Don't format docstrings in alembic migrations. +"**/alembic/versions/*.py" = ["D"] +"tests/**/*.py" = [ + "S101", # asserts allowed in tests... + "ARG", # Unused function args -> fixtures nevertheless are functionally relevant... + "FBT", # Don't care about booleans as positional arguments in tests, e.g. 
via @pytest.mark.parametrize() +] +[tool.ruff.pep8-naming] +classmethod-decorators = ["pydantic.validator"] + +[tool.pyright] +reportUnnecessaryTypeIgnoreComment = true + +## mypy not used in favor of pyright + +## [tool.mypy] + +## incremental = true + +## ignore_missing_imports = true + +## warn_return_any = true + +## warn_unused_configs = true + +## # disallow_untyped_defs = true + +## exclude = [ + +## "^.venv/", + +## "^build/", + +## "^_local_test/", +## ] + +[tool.bandit] +exclude_dirs = [".venv", "_local_test"] +skips = ["B101"] +## tests = ["B201", "B301"] + +## replaced by ruff with mypy + +## [tool.pylint.main] + +## # ! type to use pyspark-stubs + +## # extension-pkg-allow-list = ["pyspark"] + +## # ignored-modules = ["pyspark"] + +## jobs = 0 + +## # [tool.pylint.typecheck] + +## # # ! type to use pyspark-stubs + +## # generated-members = ["pyspark.sql.functions"] + +## [tool.pylint.variables] + +## # List of additional names supposed to be defined in builtins. Remember that + +## # you should avoid defining new builtins when possible. + +## # additional-builtins = ["spark"] + +## [tool.pylint."messages control"] + +## disable = [ + +## "missing-class-docstring", + +## "missing-module-docstring", + +## "missing-function-docstring", + +## "logging-fstring-interpolation", + +## ] + +## [tool.pylint.miscellaneous] + +## notes = ["FIXME"] + +## [tool.pylint.format] + +## max-line-length = 88 + +## expected-line-ending-format = "LF" +## ignore-long-lines = "^\\s*(# )??$" + +[tool.pytest.ini_options] +testpaths=["tests/unit"] +addopts=""" + -v -s + --junitxml=junit/test-results.xml + --cov app + --cov-report=html + --cov-report=xml + --cov-report=term-missing:skip-covered + --cov-fail-under=95 + """ +``` + +## Git pre-commit + +[https://pre-commit.com/](https://pre-commit.com/) + +> "Git hook scripts are useful for identifying simple issues before submission to code review. 
We run our hooks on every commit to automatically point out issues in code such as missing semicolons, trailing whitespace, and debug statements. By pointing these issues out before code review, this allows a code reviewer to focus on the architecture of a change while not wasting time with trivial style nitpicks." + +```bash +pip install pre-commit +pre-commit install + +## install the script along with the hook environments in one command + +## https://pre-commit.com/index.html#pre-commit-install-hooks +pre-commit install --install-hooks + +## Auto-update pre-commit config to the latest repos' versions. +pre-commit autoupdate + +## Clean out cached pre-commit files. +pre-commit clean + +## Clean unused cached repos. +pre-commit gc + +## Run single check +pre-commit run black + +## continuous integration + +## https://pre-commit.com/index.html#usage-in-continuous-integration +pre-commit run --all-files + +## check only files which have changed +pre-commit run --from-ref origin/HEAD --to-ref HEAD + +## Azure pipeline example with cache +https://pre-commit.com/index.html#azure-pipelines-example + +## automatically enabling pre-commit on repositories + +## https://pre-commit.com/index.html#automatically-enabling-pre-commit-on-repositories +git config --global init.templateDir ~/.git-template +pre-commit init-templatedir ~/.git-template +``` + +### Online examples + +[pylint github pre-commit-config.yaml](https://github.com/PyCQA/pylint/blob/main/.pre-commit-config.yaml) + +### Create a file named `.pre-commit-config.yaml` to the root of your project + +!!! note + + Although each lint has its own config to exclude some files from checking, pre-commit also has the key [exclude](https://pre-commit.com/#hooks-exclude) with list value or [regex](https://pre-commit.com/#regular-expressions) to exclude file from sending to linter. + + + +!!! note + + `language: system` means using the executables from the same environment of current Python interpreter. + + + +!!! 
warning + + When using mypy in pre-commit, it would be better run `pre-commit run --all-files`, mypy [doesn't work well with only diff files](https://github.com/python/mypy/issues/13916) sent by `pre-commit run --from-ref origin/${pullrequest_target_branch_name} --to-ref HEAD`. + + + +```yaml + +## Installation: + +## pip install pre-commit + +## pre-commit install +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.3.0 + hooks: + - id: check-json + exclude: devcontainer.json + - id: check-yaml + - id: check-toml + - id: end-of-file-fixer + - id: trailing-whitespace + - id: debug-statements + - id: requirements-txt-fixer + - id: detect-private-key + - id: mixed-line-ending + args: ["--fix=lf"] + - id: check-added-large-files + - id: no-commit-to-branch + - repo: https://github.com/Lucas-C/pre-commit-hooks + rev: v1.3.1 + hooks: + - id: forbid-crlf + - id: remove-crlf + - id: forbid-tabs + - id: remove-tabs + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.0.0-alpha.1 + hooks: + - id: prettier + - repo: https://github.com/pre-commit/pygrep-hooks + rev: v1.9.0 + hooks: + - id: python-check-blanket-type-ignore + - id: python-check-mock-methods + - id: python-no-log-warn + - id: python-use-type-annotations + - repo: https://github.com/asottile/pyupgrade + rev: v3.1.0 + hooks: + - id: pyupgrade + - repo: local + hooks: + - id: bandit + name: bandit + entry: bandit + language: system + types: [python] + args: + - -c + - pyproject.toml + - id: ruff + name: ruff + entry: ruff + language: system + types: [python] + args: + - "." + - id: black + name: black + entry: black + language: system + types: [python] + - id: pyright + name: pyright + language: system + entry: pyright + types: [python] + - id: pytest + name: pytest + types: [python] + entry: pytest + language: system + pass_filenames: false + always_run: true + +``` + +!!! 
warning + + Be aware that especially in a local environment, we often use venv, in such case, it would be better to use above system level lint executables instead of below public ones, the checks will be more accurate. + + + +```yml + +## example of using online linters + +## Installation: + +## pip install pre-commit + +## pre-commit install +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-json + exclude: devcontainer.json + - id: check-yaml + - id: check-toml + - id: end-of-file-fixer + - id: trailing-whitespace + - id: debug-statements + - id: requirements-txt-fixer + - id: detect-private-key + - id: mixed-line-ending + args: ["--fix=lf"] + - id: check-added-large-files + - id: no-commit-to-branch + - repo: https://github.com/Lucas-C/pre-commit-hooks + rev: v1.5.1 + hooks: + - id: forbid-crlf + - id: remove-crlf + - id: forbid-tabs + - id: remove-tabs + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.0.0-alpha.9-for-vscode + hooks: + - id: prettier + exclude: ".md" + - repo: https://github.com/pre-commit/pygrep-hooks + rev: v1.10.0 + hooks: + - id: python-check-blanket-type-ignore + - id: python-check-mock-methods + - id: python-no-log-warn + - id: python-use-type-annotations + - repo: local + hooks: + - id: bandit + name: bandit + entry: bandit + language: system + types: [python] + args: + - -c + - pyproject.toml + - id: ruff + name: ruff + entry: ruff + language: system + types: [python] + args: + - --fix + - id: black + name: black + entry: black + language: system + types: [python] + - id: mypy + name: mypy + language: system + entry: mypy + types: [python] + args: + # - --strict + - --show-error-codes + - id: pytest + name: pytest + types: [python] + entry: pytest + language: system + pass_filenames: false + always_run: true +``` + +### Install the git hook scripts + +```bash +$ pre-commit install +pre-commit installed at .git/hooks/pre-commit + +$ pre-commit install --hook-type 
post-merge +pre-commit installed at .git/hooks/post-merge + +$ pre-commit install --hook-type pre-merge-commit +pre-commit installed at .git/hooks/pre-merge-commit +``` + +!!! note + + You could also run `pre-commit install --hook-type pre-push` to register pre-push hooks. + + + +### Run against all the files + +> "it's usually a good idea to run the hooks against all of the files when adding new hooks (usually pre-commit will only run on the changed files during git hooks)" + +```bash +pre-commit run --all-files +``` + +### Run for changed files only in CI + +Please check also this [official doc](https://pre-commit.com/#usage-in-continuous-integration). + +```bash +git fetch origin +pre-commit run --from-ref origin/${pullrequest_target_branch_name} --to-ref HEAD +``` + +!!! warning + + When using mypy, it would be better to use mypy against to [all files in the project](https://github.com/python/mypy/issues/13916), but not the changed one only. + + + +### Git commit + +Each time we use git commit to stage some files, these files will be sent to pre-commit to be checked against to the hooks defined in `.pre-commit-config.yaml`. + +### Temporarily disabling hooks + +The [official doc](https://pre-commit.com/#temporarily-disabling-hooks) gives the example how to disable explicitly hooks by hooks' ids: `SKIP=flake8 git commit -m "foo"`, but if you want to disable completely all the hooks, an easy way might be found [here](https://stackoverflow.com/a/7230886) by using `git commit --no-verify` or its shortcut `git commit -n`. If you use [pre-commit during push](https://pre-commit.com/#pre-commit-during-push), you can disable pre-commit during push by `git push --no-verify` or `git push -n`. 
+ +### Automatically enabling pre-commit on repositories + +[https://pre-commit.com/#automatically-enabling-pre-commit-on-repositories](https://pre-commit.com/#automatically-enabling-pre-commit-on-repositories) + +### Usage in continuous integration + +[https://pre-commit.com/#usage-in-continuous-integration](https://pre-commit.com/#usage-in-continuous-integration) diff --git a/docs/posts/2021/2021-01-22-python-requests-with-retry.md b/docs/posts/2021/2021-01-22-python-requests-with-retry.md new file mode 100644 index 00000000..9f50cc11 --- /dev/null +++ b/docs/posts/2021/2021-01-22-python-requests-with-retry.md @@ -0,0 +1,88 @@ +--- +authors: +- copdips +categories: +- python +comments: true +date: + created: 2021-01-22 + updated: 2021-03-20 +description: Make python requests retry easily to use +--- + +# Python Requests With Retry + +There're several solutions to retry a HTTP request with [Requests](https://requests.readthedocs.io/en/master/) module, some of them are: + +1. Native Requests' retry based on urllib3's [HTTPAdapter](https://2.python-requests.org/en/master/api/#requests.adapters.HTTPAdapter). +2. Third party module: [backoff](https://github.com/litl/backoff). +3. Third party module: [tenacity](https://github.com/jd/tenacity). + +The native **HTTPAdapter** is not easy to use. The **tenacity** module is very powerful, but is also more or less overkill because it's a general Python retry utility, and doesn't throw the same exception `requests.exceptions.HTTPError` raised by `raise_for_status()` of Requests. Using tenacity to an ongoing project might involve some code refactoring. So this post will just show some snippets to make retry with the **backoff** module. + +!!! warning + + Usually, we should only retry on [idempotent verbs](https://developer.mozilla.org/en-US/docs/Glossary/Idempotent#technical_knowledge), we can get the same thing twice but we don't want to create the same thing twice. 
On the other hand, sometimes the specific environment that we're working on might have a POST as idempotent too, so make sure of that before using the retry. + +## Using backoff to retry + +```python +import logging +from logging import Logger + +import backoff +import requests +from requests.exceptions import HTTPError +import urllib3 + +urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + + +# in an internal enterprise environment, we often need to disable the proxy and ignore the ssl check. Of course, if you don't trust the target, then verify the ssl. +NO_PROXY = {"http": None, "https": None} +COMMON_REQUESTS_PARAMS = {"verify": False, "proxies": NO_PROXY} + + +# This snippet only retries on the response return code >= 500 +def fatal_code(e): + return 400 <= e.response.status_code < 500 + + +BACKOFF_RETRY_ON_EXCEPTION_PARAMS = { + # expo: [1, 2, 4, 8, etc.] https://github.com/litl/backoff/blob/master/backoff/_wait_gen.py#L6 + "wait_gen": backoff.expo, + # HTTPError raised by raise_for_status() + # HTTPError code list: https://github.com/psf/requests/blob/master/requests/models.py#L943 + "exception": (HTTPError,), + "max_tries": 4, + "max_time": 50, # nginx closes a session at 60' second by default + "giveup": fatal_code, +} + + +@backoff.on_exception(**BACKOFF_RETRY_ON_EXCEPTION_PARAMS) +def request_with_retry( + should_log: bool = False, + logger: Logger = logging.getLogger(), + logger_level: str = "info", + **request_params +): + full_params = COMMON_REQUESTS_PARAMS | request_params + requests_params_keys_to_log = ["data", "json", "params"] + if should_log: + params_message = "" + for key in requests_params_keys_to_log: + if key in request_params: + params_message += " with {} {}".format(key, request_params[key]) + log_message = "[{}] {} with params{}.".format( + full_params["method"], full_params["url"], params_message + ) + getattr(logger, logger_level.lower())(log_message) + response = requests.request(**full_params) + 
response.raise_for_status() + return response + +# how to use: +request_params = {"method": "get", "url": "http://localhost"} +response = request_with_retry(**request_params) +``` diff --git a/docs/posts/2021/2021-03-06-trying-python-pipreqs-and-pip-tools.md b/docs/posts/2021/2021-03-06-trying-python-pipreqs-and-pip-tools.md new file mode 100644 index 00000000..c518cc31 --- /dev/null +++ b/docs/posts/2021/2021-03-06-trying-python-pipreqs-and-pip-tools.md @@ -0,0 +1,152 @@ +--- +authors: +- copdips +categories: +- python +- pip +comments: true +date: + created: 2021-03-06 +description: Some simple demos to test pipreqs and pip-tools +--- + +# Trying Python pipreqs and pip-tools + +Relative to [pipenv](https://github.com/pypa/pipenv), and [poetry](https://github.com/python-poetry/poetry), if you're searching for some lightweight python package managers for a small project, I will introduce 2 handy tools for you: [pipreqs](https://github.com/bndr/pipreqs) and [pip-tools](https://github.com/jazzband/pip-tools). + + + +## pipreqs + +[pipreqs github](https://github.com/bndr/pipreqs) + +Suppose you are onboarded to an existing project where only pip is used. The requirements.txt file is generated by `pip freeze`. and it contains more than 30 lines of requirements, in addition the team cannot remember the original basic requirements anymore. One of the simple ways to rebuild the basic requirements (the project dependency but not all the underlying dependencies) is to use `pipreqs`. + +How to use: + +```bash +# First of all, just backup your current requirements.txt +$ mv requirements.txt{,.bck} + +# at this moment, there's no more requirements.txt, then run pipreqs +$ pipreqs /home/project/location +Successfully saved requirements file in /home/project/location/requirements.txt +``` + +Let's use the `--debug` option to see what it does in background + +```bash +# I'm running pipreqs from the root path of a Flask project +$ pipreqs . 
--debug +DEBUG: Found packages: {'json', 'flask_webtest', 'time', 'requests', 'webtest', 'sys', 'flask', 'os', 'pathlib', 'setuptools', 'unittest', 'werkzeug'} +DEBUG: Found imports: Flask, flask_webtest, Requests, WebTest, Werkzeug +DEBUG: Getting packages information from Local/PyPI +DEBUG: Starting new HTTPS connection (1): pypi.python.org:443 +DEBUG: https://pypi.python.org:443 "GET /pypi/flask_webtest/json HTTP/1.1" 301 122 +DEBUG: Starting new HTTPS connection (1): pypi.org:443 +DEBUG: https://pypi.org:443 "GET /pypi/flask_webtest/json HTTP/1.1" 301 221 +DEBUG: https://pypi.org:443 "GET /pypi/Flask-WebTest/json HTTP/1.1" 200 2155DEBUG: Starting new HTTPS connection (1): pypi.python.org:443 +DEBUG: https://pypi.python.org:443 "GET /pypi/WebTest/json HTTP/1.1" 301 122DEBUG: Starting new HTTPS connection (1): pypi.org:443 +DEBUG: https://pypi.org:443 "GET /pypi/WebTest/json HTTP/1.1" 200 12870 + +$ cat ./requirements.txt +Werkzeug==1.0.1 +Flask==1.1.2 +requests==2.25.1 +flask_webtest==0.0.9 +WebTest==2.0.35 +``` + +pipreqs has also some other useful [options](https://github.com/bndr/pipreqs#usage)(--no-pin, --force). + +## pip-tools + +[pip-tools github](https://github.com/jazzband/pip-tools) + +Another missing feature of the native pip is that `pip freeze` doesn't provide the packages dependencies. All the packages installed in the venv are listed in a single requirements.txt file, in the same top level with only the version info. Pipenv and Poetry resolve this issue, and introduce some lock system. But they're not the native requirements.txt way. By using pip-tools, we can resolve this issue too and at the same time keep using requirements.txt. I found this tool occasionally by checking [Flask project requirements](https://github.com/pallets/flask/blob/master/requirements/dev.txt). 
+ +The idea of pip-tools is to maintain a project basic dependency in a file called `requirements.in`, then use pip-tools to generate the `requirements.txt` file with all the dependencies including the underlying dependencies info inside but in the comments part. + +Please be aware that: **pip-tools = pip-compile + pip-sync** + +### pip-compile + +I'm running a small Flask project, the only package I need is just a single Flask. Let's see an example of pip-compile by using `requirements.in` file without `setup.py`, + +```bash +(venv)$ cat requirements.in +Flask + +(venv)$ pip-compile requirements.in + +(venv)$ cat requirements.txt +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile requirements.in +# +click==7.1.2 + # via flask +flask==1.1.2 + # via -r r.in +itsdangerous==1.1.0 + # via flask +jinja2==2.11.3 + # via flask +markupsafe==1.1.1 + # via jinja2 +werkzeug==1.0.1 + # via flask +``` + +The Solution for with `setup.py` is [here](https://github.com/jazzband/pip-tools#requirements-from-setuppy). 
+ +### pip-sync + +Let's see an example of pip-sync: + +```bash +# Upgrade Werkzeug to a rc version that is newer that the once listed by requirements.txt +(venv)$ pip install Werkzeug==2.0.0rc2 +Collecting Werkzeug==2.0.0rc2 + Downloading Werkzeug-2.0.0rc2-py3-none-any.whl (284 kB) + |████████████████████████████████| 284 kB 3.3 MB/s +Installing collected packages: werkzeug + Attempting uninstall: werkzeug + Found existing installation: Werkzeug 1.0.1 + Uninstalling Werkzeug-1.0.1: + Successfully uninstalled Werkzeug-1.0.1 +Successfully installed werkzeug-2.0.0rc2 + +# Use pip-sync to downgrade Werkzeug in order to make the venv to have exactly the same version of dependencies listed in requirements.txt +(venv)$ pip-sync +Collecting Werkzeug==1.0.1 + Using cached Werkzeug-1.0.1-py2.py3-none-any.whl (298 kB) +Installing collected packages: Werkzeug + Attempting uninstall: Werkzeug + Found existing installation: Werkzeug 2.0.0rc2 + Uninstalling Werkzeug-2.0.0rc2: + Successfully uninstalled Werkzeug-2.0.0rc2 +Successfully installed Werkzeug-1.0.1 +``` + +### pipdeptree for dependency tree + +The pip-tools' github page introduces also a dependency tree generation tool: [pipdeptree](https://github.com/jazzband/pip-tools#other-useful-tools) which is also very nice: + +```bash +(venv)$ pipdeptree +Flask==1.0 + - click [required: >=5.1, installed: 7.1.2] + - itsdangerous [required: >=0.24, installed: 1.1.0] + - Jinja2 [required: >=2.10, installed: 2.11.3] + - MarkupSafe [required: >=0.23, installed: 1.1.1] + - Werkzeug [required: >=0.14, installed: 1.0.1] +pip-tools==5.5.0 + - click [required: >=7, installed: 7.1.2] + - pip [required: >=20.1, installed: 20.2.3] +pipdeptree==2.0.0 + - pip [required: >=6.0.0, installed: 20.2.3] +setuptools==49.2.1 +``` diff --git a/docs/posts/2021/2021-06-12-python-unittest-cheet-sheet.md b/docs/posts/2021/2021-06-12-python-unittest-cheet-sheet.md new file mode 100644 index 00000000..5e441347 --- /dev/null +++ 
b/docs/posts/2021/2021-06-12-python-unittest-cheet-sheet.md @@ -0,0 +1,559 @@ +--- +authors: +- copdips +categories: +- python +- unittest +comments: true +date: + created: 2021-06-12 +description: '' +--- + +# Python Unittest Cheat Sheet + +Python unittest and Pytest are a big deal, this post just gives some small & quick examples on how to use the Python unittest framework, especially with the Pytest framework. This post is not finished yet. + + + +## pytest in Makefile + +```Makefile +# Makefile +# https://github.com/databrickslabs/dbx/blob/main/Makefile + +SHELL=/bin/bash +VENV_NAME := $(shell [ -d venv ] && echo venv || echo .venv) +PYTHON=${VENV_NAME}/bin/python +FOLDER_FOR_COV=module1_folder module2_folder +COVERAGE_THRESHOLD=80 + +test: + $(PYTHON) -m pytest tests/unit/ -v -s -n auto --cov ${FOLDER_FOR_COV} \ + --cov-report=html \ + --cov-report=term-missing:skip-covered \ + --cov-fail-under=$(COVERAGE_THRESHOLD) +``` + +## pytest \-\-pdb + +[https://docs.pytest.org/en/stable/usage.html#dropping-to-pdb-python-debugger-on-failures](https://docs.pytest.org/en/stable/usage.html#dropping-to-pdb-python-debugger-on-failures) + +This will invoke the Python debugger on every failure (or KeyboardInterrupt). + +## pytest \-\-pdb \-\-pdbcls=IPython.terminal.debugger:TerminalPdb + +[https://docs.pytest.org/en/stable/usage.html#using-the-builtin-breakpoint-function](https://docs.pytest.org/en/stable/usage.html#using-the-builtin-breakpoint-function) + +```bash +$ pytest --help | grep -i ipython + --pdbcls=IPython.terminal.debugger:TerminalPdb +``` + +`--pdbcls=IPython.terminal.debugger:Pdb` also opens an ipython session, [but without tab completion (readline)](https://ipython.readthedocs.io/en/latest/api/generated/IPython.core.debugger.html#IPython.core.debugger.Pdb). + +This will use ipdb instead of pdb.
Can also be set by default in `pytest.ini`: + +```bash +[pytest] +addopts = --pdbcls=IPython.terminal.debugger:Pdb +``` + +PS: an alternative: `pdbpp` (successor of `pytest-ipdb`) at: https://github.com/pdbpp/pdbpp + +## export PYTHONBREAKPOINT=ipdb.set_trace + +Another way to use ipdb in the debugger is to set `export PYTHONBREAKPOINT=ipdb.set_trace`, and set a break point with `breakpoint()` (introduced in [Python 3.7](https://docs.python.org/3/library/functions.html#breakpoint)), then run the test with `pytest -s`. + +!!! note + + `import pdb; pdb.set_trace()` won't drop into an ipdb session this way. + +## jupyter notebook #%% Debug Cell (VSCode only) + +Add the `#%%` marker on a line, you will see a `Debug Cell` code lens. You should install the jupyter module first. + +!!! warning + + Although we get the `Debug Cell`, it seems that it doesn't work in tests, we should do more research later. + +## sys.last_value, sys.last_type and sys.last_traceback + +[https://docs.pytest.org/en/stable/usage.html#dropping-to-pdb-python-debugger-on-failures](https://docs.pytest.org/en/stable/usage.html#dropping-to-pdb-python-debugger-on-failures) + +Note that on any failure the exception information is stored on sys.last_value, sys.last_type and sys.last_traceback. In interactive use, this allows one to drop into postmortem debugging with any debug tool.
One can also manually access the exception information, for example: + +```python +# when pytest --pdb is stopping at a failure +>>> import sys + +>>> sys.last_traceback.tb_lineno +1641 + +>>> sys.last_traceback.tb_frame + + +>>> sys.last_value +AssertionError('assert result == "ok"',) + +>>> sys.last_type + + + + +``` + +## pytest \-\-trace + +[https://docs.pytest.org/en/stable/usage.html#dropping-to-pdb-python-debugger-at-the-start-of-a-test](https://docs.pytest.org/en/stable/usage.html#dropping-to-pdb-python-debugger-at-the-start-of-a-test) + +allows one to drop into the PDB prompt immediately at the start of each test via a command line option. + + + +## pytest \-\-disable-socket + +This is using a third party plugin [pytest-socket](https://github.com/miketheman/pytest-socket) to disable all network calls flowing through Python's socket interface. Unit test should not have any network calls, even any local file operations. + +To work with async: `pytest --disable-socket --allow-unix-socket` + +To allow specific hosts: `pytest --disable-socket --allow-hosts=127.0.0.1,8.8.8.8` +!!! warning + + Not easy with IPs other than 127.0.0.1, as you might need to open sockets to more IPs for intermediate connections. So normally just --allow-hosts=127.0.0.1 if you have a local service (database for e.g.) for the unit tests. + +!!! warning + + Pay extra attention to this [cavet](https://github.com/miketheman/pytest-socket#frequently-asked-questions). If you create another fixture that creates a socket usage that has a "higher" instantiation order, such as at the module/class/session, then the higher order fixture will be resolved first, and won't be disabled during the tests. + +## @pytest.mark + +[https://docs.pytest.org/en/stable/example/markers.html](https://docs.pytest.org/en/stable/example/markers.html) + +We can use `@pytest.mark.foo` decorator to add a marker (label) on any test, and use `pytest -m foo` to run the tests only with mark name is `foo`. 
+This method is often used by the pytest extensions to, for example, enable or disable the extension on some specific tests. Like [@pytest.mark.enable_socket for the pytest-socket extension](https://github.com/miketheman/pytest-socket#usage) + +Some people also use markers to categorize the tests, like `@pytest.mark.unit` for unit tests, and `@pytest.mark.integration` for integration tests, etc. +Personally, I don't like this because it forces you to add markers to every test, which is heavy work, and once you forget to add the markers, your tests won't be run, and you will never discover it. The common usage (maybe I'm wrong) that I saw on github is just to put different categories' tests in different folders. + +We can also [mark whole classes or modules](https://docs.pytest.org/en/stable/example/markers.html#marking-whole-classes-or-modules). + +## pytest -k expr + +[https://docs.pytest.org/en/stable/example/markers.html#using-k-expr-to-select-tests-based-on-their-name](https://docs.pytest.org/en/stable/example/markers.html#using-k-expr-to-select-tests-based-on-their-name) + +You can use the -k command line option to specify an expression which implements a substring match on the test names `or class names or file names` instead of the exact match on markers that -m provides. This makes it easy to select tests based on their names. + + + +You can use `and`, `or`, and `not`. + +```bash +$ pytest -k "send_http" -v +$ pytest -k "not send_http" -v +$ pytest -k "send_http or quick" -v +``` + +## @pytest.mark.xfail(strict=True, reason="") + +[https://docs.pytest.org/en/reorganize-docs/new-docs/user/xfail.html#strict-parameter](https://docs.pytest.org/en/reorganize-docs/new-docs/user/xfail.html#strict-parameter) + +Having the xfail marker will still run the test but won’t report a traceback once it fails. Instead terminal reporting will list it in the “expected to fail” (`XFAIL`) section.
If the test doesn’t fail it will be reported as “unexpectedly passing” (`XPASS`). set strict=True to ensure `XPASS` (unexpectedly passing) causes the tests to be recorded as a failure. + +```python + +@pytest.mark.xfail(strict=True, reason="") +def test_function(): + ... + + + +``` + +## @pytest.mark.parametrize + +[https://docs.pytest.org/en/stable/example/parametrize.html](https://docs.pytest.org/en/stable/example/parametrize.html) + +I put `@pytest.mark.parametrize` out of `@pytest.mark` because they're really different. In fact, I discovered pytest from this functionnality. + +```python +@pytest.mark.parametrize( + "a, b, expected", + [ + (1, 2, 3), + (3, 3, 6), + ], +) +def test_sum(a, b, expected): + total = a + b + assert total == expected +``` + +### Apply indirect on particular arguments + +[https://docs.pytest.org/en/stable/example/parametrize.html#apply-indirect-on-particular-arguments](https://docs.pytest.org/en/stable/example/parametrize.html#apply-indirect-on-particular-arguments) + +Very often parametrization uses more than one argument name. There is opportunity to apply indirect parameter on particular arguments. It can be done by passing list or tuple of arguments’ names to indirect. In the example below there is a function test_indirect which uses two fixtures: x and y. Here we give to indirect the list, which contains the name of the fixture x. The indirect parameter will be applied to this argument only, and the value a will be passed to respective fixture function. + + + +if `indirect=True`, both `x` and `y` fixtures will be used, if only `indirect=["x"]`, then only the fixture `x` will be used, and `y` will be considered as a standard var name. 
+ +```python +# content of test_indirect_list.py + +import pytest + + +@pytest.fixture(scope="function") +def x(request): + return request.param * 3 + + +@pytest.fixture(scope="function") +def y(request): + return request.param * 2 + + +@pytest.mark.parametrize("x, y", [("a", "b")], indirect=["x"]) +def test_indirect(x, y): + assert x == "aaa" + assert y == "b" +``` + +## side_effect functions and iterables + +[https://docs.python.org/3/library/unittest.mock-examples.html#side-effect-functions-and-iterables](https://docs.python.org/3/library/unittest.mock-examples.html#side-effect-functions-and-iterables) + +We used to use side_effect to force a mock object to raise an exception. But we can also use side_effect to define different return values. This is useful when we have a same mock function used multiple times in a testing function, and this mock function should return different values. + +**functions:** + +```python +>>> vals = {(1, 2): 1, (2, 3): 2} +>>> def side_effect(*args): +... return vals[args] +... 
+>>> mock = MagicMock(side_effect=side_effect) +>>> mock(1, 2) +1 +>>> mock(2, 3) +2 +``` + +**iterables:** + +```python +>>> mock = MagicMock(side_effect=[4, 5, 6]) +>>> mock() +4 +>>> mock() +5 +>>> mock() +6 +``` +## mock any class with Mock + +```python +from dataclasses import dataclass +from unittest.mock import Mock + + +@dataclass +class A: + name: str + + +@dataclass +class B: + name: str + + +@dataclass +class InventoryItem: + a: A + b: B + + +def test_class_inventory_item(): + mock_inventory_item = InventoryItem(*[Mock() for _ in range(2)]) + + # or using inspect to get dynamically the class parameters count + from inspect import signature + mock_inventory_item = InventoryItem(*[Mock() for _ in range(len(signature(InventoryItem).parameters))]) +``` + +## monkeypatch + +[monkeypatch](https://docs.pytest.org/en/stable/monkeypatch.html) is a pytest native fixture, all modifications will be undone after the requesting test function or fixture has finished. + +### Monkeypatching functions or the property of a class + +https://docs.pytest.org/en/stable/monkeypatch.html#simple-example-monkeypatching-functions + +Very similar to Python standard lib `unittest.mock.patch` decorator since Python 3, but `monkeypatch` is a fixture. Some people find `monkeypatch` is less effort to write than `unittest.mock.patch`. Ref. 
https://github.com/pytest-dev/pytest/issues/4576 + +To use the native `unittest.mock.patch`, use the [`wraps` parameter](https://stackoverflow.com/a/59460964/5095636): + +```python +# replace function bar of module x by another function fake_bar with unittest.mock.patch +# we can assert the mocked function with mock_bar +from unittest.mock import patch + +def foo(arg1, arg2): + r = bar(arg1) + +def test_foo(): + with patch("x.bar", wraps=fake_bar) as mock_bar: + actual = foo(arg1, arg2) + assert actual == expected + mock_bar.assert_called_once_with(arg1) +``` + +```python +# replace function bar of module x by another function fake_bar with monkeypatch +# we cannot assert the mocked function, but we don't need to give the x module in full string format. + +def foo(arg1, arg2): + r = bar(arg1) + +def test_foo(monkeypatch): + monkeypatch.setattr(x, "bar", fake_bar) +``` + +```python +# replace function bar of module x by another function fake_bar with pytest-mock +# we can assert the mocked function + +def foo(arg1, arg2): + r = bar(arg1) + +def test_foo(mocker): + mock_bar = mocker.patch("x.bar", wraps=fake_bar) +``` + +!!! note + + There's also a plugin `pytest-mock`, which provides `spy` and `stub` utilities. + +!!! note + + The `wraps` parameter in the native `unittest.mock.patch` can also be used to [spy function](https://stackoverflow.com/a/43065411/5095636), if you don't want to use `pytest-mock.spy`. + +```python +monkeypatch.setattr(obj, name, value, raising=True) +monkeypatch.delattr(obj, name, raising=True) +``` + +### Monkeypatching environment variables + +[https://docs.pytest.org/en/stable/monkeypatch.html#monkeypatching-environment-variables](https://docs.pytest.org/en/stable/monkeypatch.html#monkeypatching-environment-variables) + +!!! 
note + + Can be replaced by python native unittest.mock [@patch.dict('os.environ', {'newkey': 'newvalue'})](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.patch.dict) + +```python +# contents of our test file e.g. test_code.py +import pytest + + +@pytest.fixture +def mock_env_user(monkeypatch): + monkeypatch.setenv("USER", "TestingUser") + + +@pytest.fixture +def mock_env_missing(monkeypatch): + monkeypatch.delenv("USER", raising=False) + + +# notice the tests reference the fixtures for mocks +def test_upper_to_lower(mock_env_user): + assert get_os_user_lower() == "testinguser" + + +def test_raise_exception(mock_env_missing): + with pytest.raises(OSError): + _ = get_os_user_lower() +``` +### monkeypatch with parametrize + +As said above monkeypatch is a fixture, so we can use [pytest-lazy-fixture](https://github.com/tvorog/pytest-lazy-fixture) to parametrize the fixtures. I cannot remember where is the link, in fact on one page from pytest official doc, it says that pytest cannot do it for the moment, that's why `pytest-lazy-fixture` is introduced here. +### + +It is worth saying that following monkeypatch on env won't work: + +```python +# file a.py +TEST_USER = os.getenv("TEST_USER") + +def get_test_user(): + return(TEST_USER) + + +# file test_a.py +import pytest + +from a import get_test_user + +@pytest.fixture +def mock_env_user(monkeypatch): + monkeypatch.setenv("TEST_USER", "TestingUser") + +def test_get_test_user(mock_env_user): + assert get_test_user() == "testinguser" +``` + +The test will fail, because the line `TEST_USER = os.getenv("TEST_USER")` in the file `a.py` is always imported before `mock_env_user` by `test_a.py`, `from a import get_test_user` is at the beginning of the test file. During the import, at this moment, the env var `TEST_USER` doesn't exist yet in os, it will always have the value `None`. 
To fix this problem, we need to put the `os.getenv` into `get_test_user` like: + +```python +# file a.py + +def get_test_user(): + TEST_USER = os.getenv("TEST_USER") + return(TEST_USER) +``` + +### Monkeypatching dictionaries + +!!! note + + Can be replaced by python native unittest.mock [@patch.dict()](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.patch.dict) + +```python +# patch one key at each patch +monkeypatch.setitem(app.DEFAULT_CONFIG, "user", "test_user") +monkeypatch.setitem(app.DEFAULT_CONFIG, "database", "test_db") +monkeypatch.delitem(app.DEFAULT_CONFIG, "name", raising=False) +``` +### Modifying sys.path + +```python +monkeypatch.syspath_prepend(path) +``` + +### Changing the context of the current working directory during a test + +```python +monkeypatch.chdir(path) +``` + +## pytest-xdist to run tests in parallel + +[https://github.com/pytest-dev/pytest-xdist](https://github.com/pytest-dev/pytest-xdist) + +Especially useful when your tests are unit tests for exmaple, which dont have dependencies from one with each other, and don't share any changing data, which means your tests should be stateless. + +```bash +# run on 4 CPUs +pytest -n 4 + +# run on a number of CPUs calculated automatically by the python built-in multiprocessing module +pytest -n auto + +# run on a number of CPUs calculated automatically by the module psutil, you need such module if you have logical cpus as well as certain imposed limitations (like container runtimes with cpu limits) +# ref. https://stackoverflow.com/a/14840102/5095636 +# ref. https://docs.python.org/3/library/multiprocessing.html#multiprocessing.cpu_count +pip install pytest-xdist[psutil] +pytest -n auto +``` + +!!! note + + There's another module [pytest-parallel](https://github.com/browsertron/pytest-parallel), the author says his module can run the tests in concurrency, and very efficient in integration tests, which tests might be stateful or sequential. 
I haven't tested yet, so cannot say anything here. + +## speccing + +[https://docs.python.org/3/library/unittest.mock.html#autospeccing](https://docs.python.org/3/library/unittest.mock.html#autospeccing) + +mock.patch returns a mock object, a mock object can have whatever atrributes and methods. + +`mock.asssert_called_once_with(4, 5, 6)` doesn't fail as shown as follows: + +```bash +>>> mock = Mock(name='Thing', return_value=None) +>>> mock(1, 2, 3) +>>> mock.asssert_called_once_with(4, 5, 6) + +``` + +### simple speccing + +```bash +>>> from urllib import request +>>> mock = Mock() +>>> mock.asssert_called_with() + + +# using simple speccing, mock.asssert_called_with() is detected as an error +>>> mock = Mock(spec=request.Request) +>>> mock.asssert_called_with() +--------------------------------------------------------------------------- +AttributeError Traceback (most recent call last) +... +AttributeError: Mock object has no attribute 'asssert_called_with' + +# still using simple speccing, mock.data.asssert_called_with() is detected as an mocked method, no errors. +# so simple speccing doesnt' work for nested objects +>>> mock.data.asssert_called_with() + +``` + +### auto-speccing + +#### Using patch(autospec=True) + +```bash +>>> from urllib import request +>>> patcher = patch('__main__.request', autospec=True) +>>> mock_request = patcher.start() + +>>> request is mock_request +True + +# mock_request.Request has the spec='Request' now +>>> mock_request.Request + + +# the real request object doesn't have the static data attribute, so autospecced object doesn't have it neither. 
+>>> mock_request.data +Traceback (most recent call last): + File "", line 1, in + File "/usr/lib/python3.8/unittest/mock.py", line 637, in __getattr__ + raise AttributeError("Mock object has no attribute %r" % name) +AttributeError: Mock object has no attribute 'data' +``` + +#### Using create_autospec() + +```bash +>>> from urllib import request +>>> mock_request = create_autospec(request) +>>> mock_request.Request('foo', 'bar') + +``` + +!!! warning + + autospec works well on methods and static attributes, but a serious problem is that it is common for instance attributes to be created in the __init__() method and not to exist on the class at all. autospec can’t know about any dynamically created attributes and restricts the api to visible attributes. This is why autospeccing is not the patch default behaviour. Search the above phrase in the [python official doc](https://docs.python.org/3/library/unittest.mock.html#autospeccing) to get more details and solutions. + +## unittest.mock.ANY + +[https://docs.python.org/3/library/unittest.mock.html#any](https://docs.python.org/3/library/unittest.mock.html#any) + +```bash +>>> from unittest.mock import Mock +>>> mock = Mock(return_value=None) +>>> mock('foo', bar=object()) +>>> mock.assert_called_once_with('foo', bar=ANY) +``` + +```bash +>>> from unittest.mock import Mock, call +>>> m = Mock(return_value=None) +>>> m(1) +>>> m(1, 2) +>>> m(object()) +>>> m.mock_calls == [call(1), call(1, 2), ANY] +``` diff --git a/docs/posts/2021/2021-06-27-python-datetime-utc-now.md b/docs/posts/2021/2021-06-27-python-datetime-utc-now.md new file mode 100644 index 00000000..7e89cbbf --- /dev/null +++ b/docs/posts/2021/2021-06-27-python-datetime-utc-now.md @@ -0,0 +1,36 @@ +--- +authors: +- copdips +categories: +- python +- datetime +comments: true +date: + created: 2021-06-27 + updated: 2022-09-05 +description: '' +--- + +# Python datetime utcnow + +Previously, when I needed a real UTC now with [ISO 
8601](https://en.wikipedia.org/wiki/ISO_8601) format, I used to use the [strftime](https://docs.python.org/3.9/library/datetime.html#datetime.date.strftime) function or the [pytz](https://pypi.org/project/pytz/) module. But recently I just found that Python at least since [v3.5](https://docs.python.org/3.5/library/datetime.html#datetime.datetime.utcnow) has already provided it with a built-in module: `datetime.now(timezone.utc)`, and this is also the preferred method over [`datetime.utcnow()`](https://docs.python.org/3/library/datetime.html#datetime.datetime.utcnow) + +PS: `datetime.fromisoformat()` was released with Python v3.7 + +```bash +>>> from datetime import datetime, timezone + +>>> datetime.utcnow() +datetime.datetime(2021, 6, 27, 17, 31, 14, 410011) +>>> datetime.utcnow().isoformat() +'2021-06-27T17:31:14.410200' +>>> datetime.fromisoformat(datetime.utcnow().isoformat()) +datetime.datetime(2021, 6, 27, 17, 31, 14, 415153) + +>>> datetime.now(timezone.utc) +datetime.datetime(2021, 6, 27, 17, 31, 14, 419667, tzinfo=datetime.timezone.utc) +>>> datetime.now(timezone.utc).isoformat() +'2021-06-27T17:31:14.425507+00:00' +>>> datetime.fromisoformat(datetime.now(timezone.utc).isoformat()) +datetime.datetime(2021, 6, 27, 17, 31, 14, 431368, tzinfo=datetime.timezone.utc) +``` diff --git a/docs/posts/2021/2021-09-04-python-asyncio.md b/docs/posts/2021/2021-09-04-python-asyncio.md new file mode 100644 index 00000000..9767678c --- /dev/null +++ b/docs/posts/2021/2021-09-04-python-asyncio.md @@ -0,0 +1,25 @@ +--- +authors: +- copdips +categories: +- python +- async +comments: true +date: + created: 2021-09-04 +description: '' +--- + +# Python Asyncio Study notes + +## concurrent.futures + +The [concurrent.futures](https://docs.python.org/3.9/library/concurrent.futures.html) is a high-level abstraction for the `threading` and `multiprocessing` modules. + +```mermaid! 
+graph LR + concurrent.futures --->| on top of | threading + concurrent.futures --->| on top of | multiprocessing + threading --->| on top of | \_thread + click concurrent.futures "https://docs.python.org/3.9/library/concurrent.futures.html" _blank +``` diff --git a/docs/posts/2022/2022-01-22-azure-pipeline-predefined-variables.md b/docs/posts/2022/2022-01-22-azure-pipeline-predefined-variables.md new file mode 100644 index 00000000..25ec7597 --- /dev/null +++ b/docs/posts/2022/2022-01-22-azure-pipeline-predefined-variables.md @@ -0,0 +1,66 @@ +--- +authors: +- copdips +categories: +- azure +- cicd +comments: true +date: + created: 2022-01-22 + updated: 2022-01-24 +description: '' +--- + +# Azure pipeline predefined variables +The [official doc](https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables) gives an explanation of all the predefined variables, but it lacks of some concret examples. Hereunder some examples for my preferred variables. + +## Access the predefined variables + +To access the variables value in YAML pipeline, we can use 2 methods: + +1. `$(System.PullRequest.SourceBranch)` : the standard way to access pipeline variables. +2. `$SYSTEM_PULLREQUEST_SOURCEBRANCH` : most of the pipeline variables are mapped to the pipeline machine environment variables in upper snake case. + +## Variables upon Git events + +Suppose we create a new branch named `new_branch`, and create a pull request (with id `123`) from the new branch `new_branch` to the `main` branch. +During the pipeline, we can see following predefined variables in different GIT events. + +!!! note + + Check [here](https://copdips.com/2023/09/github-actions-variables.html#variables-upon-git-events) for variables upon git events in Github Actions. 
+ +| variable name \ git action | on push | on pull request | on merge | on manual trigger | +| --------------------------------- | ------------------------- | ------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------- | +| Build.SourceBranch | refs/heads/new_branch | refs/pull/123/merge | refs/heads/main | refs/heads/new_branch | +| Build.SourceBranchName | new_branch | merge | main | new_branch | +| Build.SourceVersionMessage | {the last commit message} | Merge pull request 123 from new_branch into main | Merged PR 123: {pull request title}
**- It's a way to determine which PR this merge is from**
**- We can also change the default message when merging the PR** | {the last commit message} | +| Build.Reason | IndividualCI | PullRequest | IndividualCI | Manual | +| System.Pullrequest.SourceBranch | VAR_NOT_EXISTS | refs/heads/new_branch | VAR_NOT_EXISTS | VAR_NOT_EXISTS | +| System.Pullrequest.TargetBranch | VAR_NOT_EXISTS | refs/heads/main | VAR_NOT_EXISTS | VAR_NOT_EXISTS | +| System.Pullrequest.PullRequestId | VAR_NOT_EXISTS | 123 | VAR_NOT_EXISTS | VAR_NOT_EXISTS | +| System.PullRequest.SourceCommitId | VAR_NOT_EXISTS | the last commit number in pull request | VAR_NOT_EXISTS | VAR_NOT_EXISTS | + +## Variables not varying upon triggering Git action + +### System.AccessToken + +[System.AccessToken](https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#systemaccesstoken) is a [SecretVariable](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/variables?view=azure-devops&tabs=yaml%2Cbatch#secret-variables), which is in fact a PAT token with limited 1 hour of lifetime by default, and is about to be expired [5 minutes before the end of the lifetime](https://github.com/Azure/azure-sdk-for-net/blob/4162f6fa2445b2127468b9cfd080f01c9da88eba/sdk/mgmtcommon/AppAuthentication/Azure.Services.AppAuthentication/AppAuthenticationResult.cs#L41-L45). + +- **User name** + The access token is bound to a build service account, which name should be in this format: `{projectName} Build Service ({organizationName})`. So it's required to set necessary permissions on this account. For example, to be able to publish a Python wheel package to Azure Artifacts, it needs the `AddPackage` permission, we can set the build service account as a contributor to the corresponding Artifacts feed's permission tab to get this permission. 
+- **Basic auth** + If we need to use this PAT to create the base64 string, the user name for this PAT should be kept empty, which is in the format of `:$(System.AccessToken)`, to [convert it to base64](https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops&tabs=preview-page#use-a-pat), use: `printf "%s"":$(System.AccessToken)" | base64`, or `echo -n ":$(System.AccessToken)" | base64` with `-n`. When using with curl, it should be something like `curl -u :$(System.AccessToken)`, the user name part is empty. Or use a basic auth header like `{"Authorization": "Basic {:$(System.AccessToken) in base64 format}"}`. +- **OAuth** + Besides the above basic auth (it's secure as the password is a PAT with limited lifetime, not a real clear password), we can also use OAuth, with header `{"Authorization": "Bearer $(System.AccessToken)"}`, it's not enabled by default, we should enable the OAuth by checking the box `Allow scripts to access OAuth token` from `Releases / Tasks / Agent job (Run on Agent)` or from `Pipelines / Tasks / Agent job (Run on Agent)`. And we need to create a task in advance in order to see the `Tasks` menu. If we don't enable the option, and use the Bearer header directly, we will get an API response code `203`, with the reason `Non-Authoritative Information`. +- See also [**job access token**](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/access-tokens?view=azure-devops&tabs=yaml). + +### Agent.OS + +[Agent.OS](https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#agent-variables-devops-services): Just to check which OS is running the pipeline. + +## Variables to be set by user + +### System.Debug + +Add a new variable with the name [System.Debug](https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#systemdebug) and value `true` for debugging. 
diff --git a/docs/posts/2022/2022-02-09-azure-pipeline-reuse-variables-in-template-from-another-repository.md b/docs/posts/2022/2022-02-09-azure-pipeline-reuse-variables-in-template-from-another-repository.md new file mode 100644 index 00000000..5224104b --- /dev/null +++ b/docs/posts/2022/2022-02-09-azure-pipeline-reuse-variables-in-template-from-another-repository.md @@ -0,0 +1,102 @@ +--- +authors: +- copdips +categories: +- azure +- cicd +comments: true +date: + created: 2022-02-09 +description: '' +--- + +# Azure pipeline reuse variables in template from another repository + +## Context + +In my project, I have several Azure pipelines that share some same variables, instead of declaring them in each pipeline, I would like to refactor it by using some central places to store the shared variables. + +I can split the variables into 3 groups: + +1. organization level variables: + organization name, tenant id, etc. +2. project level variables: + project name, resouces group name, keyvault name, project support email, etc. +3. repository level variables: + module name, repository support email, etc. + +Suppose I'm writing an Azure pipeline called `cicd.yml` for the `repositoryA` located at: `organizationA/projectA/repositoryA`, I will save the above 3 groups of variables to 3 places: + +1. organization level variables -> to a new repository outside of the project, for e.g. `organizationA/sharedProject/sharedRepository` +2. project level variables -> to a new repository inside the same project, for e.g. `organizationA/projectA/sharedRepository` +3. 
repository level variables -> to the same repository: `organizationA/projectA/repositoryA` + +By checking following two official docs (in fact in the same doc :-)) : [Variable reuse](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/templates?view=azure-devops#variable-reuse), [Use other repositories](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/templates?view=azure-devops#use-other-repositories), the file content of each variable group could be: + +## organization level variables + +```yml +# file: organizationA/sharedProject/sharedRepository/.azure-pipelines/variables/organization_variables.yml + +variables: + organizationName: xxx +``` + +## project level variables + +```yml +# file: organizationA/projectA/sharedRepository/.azure-pipelines/variables/project_variables.yml + +variables: + - template: .azure-pipelines/variables/organization_variables.yml@sharedProject_sharedRepository + - name: myProjectVar + value: $(organizationName)_abc +``` + +## repository level variables + +```yml +# file: organizationA/projectA/repositoryA/.azure-pipelines/variables/repository_variables.yml + +variables: + - template: .azure-pipelines/variables/project_variables.yml@projectA_sharedRepository + - name: myRepositoryVar + value: $(myProjectVar)_abc +``` + +## root cicd file + +```yml +# file: organizationA/projectA/repositoryA/.azure-pipelines/cicd.yml + +# repository type = git means Azure DevOps repository as per https://docs.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#specify-multiple-repositories + +resources: + repositories: + - repository: sharedProject_sharedRepository + type: git + name: sharedProject/sharedRepository + - repository: projectA_sharedRepository + type: git + name: projectA/sharedRepository + +trigger: xxx + +variables: + - template: variables/repository_variables.yml + - name: myRepositoryVar + value: xxx + +pool: + vmImage: xxx + +steps: + - script: | + echo $(myRepositoryVar) + 
displayName: test repositry level variables +``` + +!!! note + + Note: We cannot put the `resources` part elsewhere, it must be declared in the [root pipeline file](https://developercommunity.visualstudio.com/t/unexpected-value-resources-in-yaml-template/728151#TPIN-N782729). Otherwise, the pipeline might throw the `Unexpected value 'resources'` error. There's some black magic that the variables templates defined in other repositories (for e.g. `project_variables.yml`) recognize well the `sharedProject_sharedRepository` repository resource defined in the repository hosting the `cicd.yml` file. + diff --git a/docs/posts/2022/2022-02-19-azure-pipeline-checkout-repository-from-another-project.md b/docs/posts/2022/2022-02-19-azure-pipeline-checkout-repository-from-another-project.md new file mode 100644 index 00000000..8344cee0 --- /dev/null +++ b/docs/posts/2022/2022-02-19-azure-pipeline-checkout-repository-from-another-project.md @@ -0,0 +1,60 @@ +--- +authors: +- copdips +categories: +- azure +- cicd +comments: true +date: + created: 2022-02-19 + updated: 2022-09-16 +description: '' +--- + +# Azure pipeline checkout repository from another project + +## Context + +This post can be an extend to my previous [post on variables and templates reuse](https://copdips.com/2022/02/azure-pipeline-reuse-variables-in-template-from-another-repository.html) + +In fact, in addition to the variables and templates, I also need to reuse some non native Azure pipeline yaml files, for example some Python scripts defined in the shared template. If we use the same technic shown by the previous blog, the pipeline will throw error saying that it cannot find the Python script. This is because we need to checkout the remote repository at first before using the native pipeline yaml files. 
+ +## Checkout repository from another project + +By default, each pipeline run has a temporary token of the [project build service account](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/access-tokens?view=azure-devops&tabs=yaml#scoped-build-identities), this account name should be in the format of: `[Project name] Build Service ([Organization name])`, we want to use this token to checkout the remote repository. + +We can also use a third account PAT to perform the checkout, but I won't explain here because we need to save the PAT somewhere which is less convenient than the default build service account. We should use the build service account as much as possible. + +If we do nothing, just add the `checkout` step in the pipeline as shown [here](https://docs.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#repository-resource-definition), the pipeline run will throw an error like below: + + > "remote: TF401019: The Git repository with name or identifier {remote repository name} does not exist or you do not have permissions for the operation you are attempting." + +There might be many [reasons](https://docs.microsoft.com/en-us/azure/devops/pipelines/repos/azure-repos-git?view=azure-devops&tabs=yaml#failing-checkout) that can trigger this error, but for this case, this is because since [May 2020](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/access-tokens?view=azure-devops&tabs=yaml#limit-job-authorization-scope-to-referenced-azure-devops-repositories), all the new projects have the option `Limit job authorization scope to current project for non-release pipelines` enabled. Which means by default the built-in build service account of a project A cannot access anything inside the project B. The cross-project access is denied by default. 
+ +Disabling this option makes the checkout of the remote repository work, but it also opens a very big [security issue](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/access-tokens?view=azure-devops&tabs=yaml#job-authorization-scope). So we should **NEVER** disable it. + +### Readers group to the whole target project + +My first try was going to the security tab of the remote repository (projectB's Project Settings -> Repos -> Repositories -> sharedRepo) and granting the `source project build service account` the read permission on it, but I got the same error. Then, I granted the same permission at all repositories level, same error. Finally, I added the `source project build service account` into the `Readers group` of the shared project (Project Settings -> Permissions -> Groups -> Readers), and this time, it worked. + +!!! warning + + So the whole blog can be summarized to the above phrase by using the `Readers` group. But please be aware that it's read-only access to the whole target project, which means the `source project build service account` has the **read access to all the repositories inside the target project**. If you want to grant read access **only to a single repository**, you need to add the `source project build service account` to all the other repositories security tabs and set the **Read permission to Deny**. As said in above first try, the inverse way doesn't work at the time of writing this blog. + +### Create read access to the target repository + +This method is shown [here](https://docs.microsoft.com/en-us/azure/devops/pipelines/repos/azure-repos-git?view=azure-devops&tabs=yaml#failing-checkout) in the last use case, which is: +- If the scope is project? => Yes +- Is the repo in the same project as the pipeline? => No +- Is your pipeline in a public project? => No + +You need to take additional steps to grant access. Let us say that your pipeline exists in project A and that your repository exists in project B. +1. 
Go to the project settings of the project in which the repository exists (B). Select Repos -> Repositories -> specific repository. +2. Add your-project-name Build Service (your-collection-name) to the list of users, where your-project-name is the name of the project in which your pipeline exists (A). +3. Give Read access to the account. +4. (Update 2022-09-16)(refer to this [doc](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/access-tokens?view=azure-devops&tabs=yaml#configure-permissions-for-a-project-to-access-another-project-in-the-same-project-collection)) Create a new group in the target project permissions tab. Add the `source project build service account` into this group. Grant this new group with the `View project-level information` permission, or you can also grant this permission only to the added source project build service account. + +!!! note + + In fact, the default `Readers` group has also this permission, but it's not straightforward to figure out that we must need this permission in addition to the `Read` permission at the repository level. + diff --git a/docs/posts/2022/2022-03-09-azure-pipeline-variables-and-parameters.md b/docs/posts/2022/2022-03-09-azure-pipeline-variables-and-parameters.md new file mode 100644 index 00000000..cf356e25 --- /dev/null +++ b/docs/posts/2022/2022-03-09-azure-pipeline-variables-and-parameters.md @@ -0,0 +1,127 @@ +--- +authors: +- copdips +categories: +- azure +- cicd +comments: true +date: + created: 2022-03-09 + updated: 2022-06-16 +description: '' +--- + +# Azure pipeline variables and parameters + +## Variable + +### Variable scope + +When we set variables [from a script](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/variables?view=azure-devops&tabs=yaml%2Cbatch#set-a-job-scoped-variable-from-a-script), the new variable is only available from the next step, not the step where the variable is defined. 
+ +```yml +variables: + sauce: orange +steps: +# Create a variable +- bash: | + echo "##vso[task.setvariable variable=sauce]crushed tomatoes" # remember to use double quotes + echo inside the same step, sauce: $(sauce) + +# Use the variable +# "$(sauce)" is replaced by the contents of the `sauce` variable by Azure Pipelines +# before handing the body of the script to the shell. +- bash: | + echo from the next step, sauce: $(sauce) +``` + +The result will be: + +```bash +inside the same step, sauce: orange +from the next step, sauce: crushed tomatoes +``` + +### Json Variable + +Parameter can have object type like dict in Python, but not the case for variable. The workaround is to assign a raw json string to variable, and using tools like [jq](https://stedolan.github.io/jq/) to handle it during runtime. The json string variable must follow some special format, the double quotes must be escaped, and the whole string must be enclosed by the single quote. + +```yml +aJsonVar: '{ \"dev\": \"foo\", \"prd\": \"bar\" }' +``` + +## Parameter + +### String parameter + +For string parameter with an empty string `""` as default value, in bash script task, we can use `if [[ -n $VAR_NAME ]]; then` to handle it. + +`-n` in Linux returns true (0) if exists, and not empty. 
+ +```yaml +parameters: + - name: paramName + type: string + default: "" + +steps: + - script: | + if [[ -n $PARAM_NAME ]]; then + echo PARAM_NAME is set with a value: $PARAM_NAME + fi + displayName: check paramName + failOnStderr: true + env: + PARAM_NAME: ${{ parameters.paramName }} +``` + +### Boolean parameter + +```yml +parameters: +- name: myBoolean + type: boolean + default: true +``` + +- In pipeline YAML syntax, we compare the value by YAML's Boolean type `true` or `false` +- In bash script, we should compare it with string format of `True` or `False` + +### Object parameter + +Parameter has a type of [`object`](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/runtime-parameters?view=azure-devops&tabs=script#parameter-data-types) which can take any YAML structure. If it's related to an `array/list` type, we can use `${{ each element in parameters.elements}}` to loop through it, but if it's related to a `mapping/dict` type, it will not be easy as Microsoft [hasn't provided any official docs](https://github.com/microsoft/azure-pipelines-yaml/issues/427) (and [this one](https://stackoverflow.com/a/59987335/5095636)) on how to use complex parameter with the pipeline native syntax, and my tests with different approaches failed too. Hopefully, for `mapping/dict` object type of parameter, we can work around it by doing some transformation in a script task with [convertToJson](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#converttojson) like: `echo '${{ convertToJson(parameters.elements) }}'` + +!!! warning + + Must use `single quotes` around the `convertToJson` expression. If we use `double quotes`, the output will [remove the double quotes from the json data](https://github.com/MicrosoftDocs/azure-devops-docs/issues/11983#issuecomment-1055651836). 
+ +### Loop through parameters + +We can [loop through parameters](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/runtime-parameters?view=azure-devops&tabs=script#loop-through-parameters) with: + +```yaml +steps: +- ${{ each parameter in parameters }}: + - script: echo ${{ parameter.Key }} + - script: echo ${{ parameter.Value }} +``` + +The above example provided by the official doc loops through the parameters script by script. +In the pipeline, we will see as many tasks as the number of parameters which looks a bit heavy, hereunder is how to iterate all the parameters in a single script. + +```yaml +# suppose the below pipeline is defined in a template which takes the parameter with name `parameters`, so we can reuse it in any other pipelines. +parameters: + - name: parameters + displayName: parameters + type: object + +steps: + - script: | + parameters_in_json=$(echo '${{ convertToJson(parameters.parameters) }}' | jq -c) + echo "##vso[task.logissue type=warning]parameters: $parameters_in_json" + displayName: echo parameters +``` + +The above example uses only one script to iterate all the parameters and pipe it to [jq](https://stedolan.github.io/jq/), as long as jq can handle the parameters, we can handle everything. +Here, we use `jq -c` to convert all the parameters into a single line json, which will be better displayed by `##vso[task.logissue type=warning]`, as it takes only one line. 
diff --git a/docs/posts/2022/2022-03-27-manage-azure-databricks-service-principal.md b/docs/posts/2022/2022-03-27-manage-azure-databricks-service-principal.md new file mode 100644 index 00000000..69eec681 --- /dev/null +++ b/docs/posts/2022/2022-03-27-manage-azure-databricks-service-principal.md @@ -0,0 +1,89 @@ +--- +authors: +- copdips +categories: +- azure +- databricks +comments: true +date: + created: 2022-03-27 +description: '' +--- + +# Manage Azure Databricks Service Principal + +Most of Databricks management can be done from the GUI or [CLI](https://docs.databricks.com/dev-tools/cli/index.html), but for Azure Service Principal, we can only manage it by the [SCIM API](https://docs.databricks.com/dev-tools/api/latest/scim/scim-sp.html). There's an [open PR](https://github.com/databricks/databricks-cli/pull/311) for adding support of SCIM API in Databricks CLI, but the lastest update is back to the beginning of 2021. + +This post is to add some tips that not covered by the official API docs. + +## Patch Service Principal + +The official docs gives op `add`, `remove`, in fact, if you want to for example, update the `displayName` field of a SP, the op should be `add`: + +```json +{ + "schemas": [ + "urn:ietf:params:scim:api:messages:2.0:PatchOp" + ], + "Operations": [ + { + "op": "add", + "path": "displayName", + "value": "{newServicePrincipalName}" + } + ] +} +``` + +## Consistent fields across workspaces + +We could link multiple Databricks workspaces together. Below screenshot is an example of 3 linked workspaces. + +![azure-databricks-multiple-workspaces](../../assets/blog_images/2022-03-27-manage-azure-databricks-service-principal/azure-databricks-multiple-workspaces.png) + +Please be aware that **each workspace has its own API url**. 
+ +Let's see the example of the output of the GET Service Principal endpoint, where the applicationId is `11111111-0000-0000-0000-111111111111`: + +```json +{ + "displayName": "foo", + "externalId": "22222222-0000-0000-0000-222222222222", + "groups": [ + { + "display": "group1", + "type": "direct", + "value": "111", + "$ref": "Groups/111" + }, + { + "display": "group2", + "type": "indirect", + "value": "222", + "$ref": "Groups/222" + } + ], + "id": "123456789", + "entitlements": [ + { + "value": "allow-cluster-create" + }, + { + "value": "allow-instance-pool-create" + }, + { + "value": "workspace-access" + } + ], + "applicationId": "11111111-0000-0000-0000-111111111111", + "active": true +} +``` + +Although we have 3 different workspaces, the same Service Principal (applicationId) defined in these workspace shares some fields: + +- displayName +- id +- applicationId + +And among these 3 fields, you can only update the `displayName` field, the `id` and `applicationId` fileds are immutable. Which means if we change the `displayName` in one of the workspaces by using the PATCH SCIM API, we will get the the updated `displayName` in other workspaces by using the GET SCIM API. We [can not change](https://docs.databricks.com/dev-tools/api/latest/scim/scim-sp.html#update-service-principal-by-id-patch) `id` and `applicationId` fields, and both of them are the same across workspaces. 
diff --git a/docs/posts/2022/2022-04-03-azure-pipeline-checkout-multiple-repositories.md b/docs/posts/2022/2022-04-03-azure-pipeline-checkout-multiple-repositories.md new file mode 100644 index 00000000..e44cde13 --- /dev/null +++ b/docs/posts/2022/2022-04-03-azure-pipeline-checkout-multiple-repositories.md @@ -0,0 +1,627 @@ +--- +authors: +- copdips +categories: +- azure +- cicd +comments: true +date: + created: 2022-04-03 +description: '' +--- + +# Azure Pipeline Checkout Multiple Repositories + +This post will talk about some Azure pipeline predefined variables' values in a multiple repositories checkout situation. The official doc is [here](https://docs.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops). + +!!! note + + The examples given in this post is using Azure DevOps repositories and Azure pipeline Ubuntu agent. + +## Default Pipeline workspace structure + +When a pipeline starts, something is created inside the folder defined in the predefined variable [`$(Pipeline.Workspace)`](https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#pipeline-variables-devops-services), this variable has the same value as `$(Agent.BuildDirectory)`, For example, when using the default Azure pipeline Ubuntu agent, the value is `/home/vsts/work/1`. + +At the very beginning of a pipeline run, you should the folder constructed like below: + +```bash +pwd +/home/vsts/work/1/s + +ls -lart /home/vsts/work/1 +total 24 +drwxr-xr-x 2 vsts docker 4096 Apr 3 12:52 b +drwxr-xr-x 2 vsts docker 4096 Apr 3 12:52 a +drwxr-xr-x 2 vsts docker 4096 Apr 3 12:52 TestResults +drwxr-xr-x 6 vsts docker 4096 Apr 3 12:52 . +drwxr-xr-x 4 vsts docker 4096 Apr 3 12:52 s +drwxr-xr-x 7 vsts root 4096 Apr 3 12:52 .. +``` + +- Folder `/home/vsts/work/1` for `Pipeline.Workspace`, `Agent.BuildDirectory`. +- Folder `/home/vsts/work/1/a` for `Build.ArtifactStagingDirectory`, `Build.StagingDirectory`. 
+- Folder `/home/vsts/work/1/b` for `Build.BinariesDirectory`. +- Folder `/home/vsts/work/1/s` for `System.DefaultWorkingDirectory` or sometimes for `Build.SourcesDirectory`, `Build.Repository.LocalPath`. +- Folder `/home/vsts/work/1/TestResults` for `Common.TestResultsDirectory` + +!!! warning + + The value of `Build.SourcesDirectory`, `Build.Repository.LocalPath` could change [upon checkout policies](https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services), so pay attention when using these two variables. + +!!! warning + + `System.DefaultWorkingDirectory` is very important too because its value will **never change** in whatever situation, and this is the default working directory when running the script task, we can confirm it by checking the result of the above `pwd` command. + +I will show these variables' value within different steps of 6 different pipelines: + +1. With self checkout and external repository checkout (most common) +2. Single self checkout with default path +3. Single self checkout with custom path +4. No self checkout but single external checkout with default path +5. No self checkout but single external checkout with custom path +6. No self checkout but multiple external checkout + +## With self checkout and external repository checkout + +```yml +resources: + repositories: + - repository: another_repo + type: git + name: AzureDevOpsProjectName/another_repo + +steps: + - checkout: self + persistCredentials: true # persists cred to perform some git remote commands like git push --tags + path: $(Build.Repository.Name) + + - checkout: another_repo + path: another_repo + + - script: | + cp "$BUILD_REPOSITORY_LOCALPATH/." 
"$SYSTEM_DEFAULTWORKINGDIRECTORY" -r + displayName: Copy $(Build.Repository.Name) content to default workding directroy +``` + +### Declare repository resources + +Suppose the self (primary) repository name is `cicd`, and in the pipeline file, we declare a repository resource to the repository found at `AzureDevOpsProjectName/another_repo`. + +```yml +resources: + repositories: + - repository: another_repo + type: git + name: AzureDevOpsProjectName/another_repo +``` + +From the very beginning of the pipeline line, the `another_repo` repository and the `self` repository will be automatically checked out at `/home/vsts/work/1/s` + +```bash +ls -lart /home/vsts/work/1 +total 24 +drwxr-xr-x 2 vsts docker 4096 Apr 3 12:52 b +drwxr-xr-x 2 vsts docker 4096 Apr 3 12:52 a +drwxr-xr-x 2 vsts docker 4096 Apr 3 12:52 TestResults +drwxr-xr-x 6 vsts docker 4096 Apr 3 12:52 . +drwxr-xr-x 4 vsts docker 4096 Apr 3 12:52 s +drwxr-xr-x 7 vsts root 4096 Apr 3 12:52 .. + +ls -lart /home/vsts/work/1/s +total 16 +drwxr-xr-x 6 vsts docker 4096 Apr 3 12:52 .. +drwxr-xr-x 2 vsts docker 4096 Apr 3 12:52 cicd +drwxr-xr-x 2 vsts docker 4096 Apr 3 12:52 another_repo +drwxr-xr-x 4 vsts docker 4096 Apr 3 12:52 . 
+ +``` + +At this point, the following variables having following values: + +| Predefined variable name | Value | When | +|--------------------------------|-------------------------------|---------------------------| +| Pipeline.Workspace | /home/vsts/work/1 | Beginning of the pipeline | +| Agent.BuildDirectory | /home/vsts/work/1 | Beginning of the pipeline | +| Build.ArtifactStagingDirectory | /home/vsts/work/1/a | Beginning of the pipeline | +| Build.StagingDirectory | /home/vsts/work/1/a | Beginning of the pipeline | +| Build.BinariesDirectory | /home/vsts/work/1/b | Beginning of the pipeline | +| System.DefaultWorkingDirectory | /home/vsts/work/1/s | Beginning of the pipeline | +| Build.SourcesDirectory | /home/vsts/work/1/s | Beginning of the pipeline | +| Build.Repository.LocalPath | **/home/vsts/work/1/s/cicd** | Beginning of the pipeline | +| Common.TestResultsDirectory | /home/vsts/work/1/TestResults | Beginning of the pipeline | +| PWD | /home/vsts/work/1/s | Beginning of the pipeline | + +!!! warning + + We see both the self repository (cicd) and the external repository (another_repo) is saved to `/home/vsts/work/1/s`, this is because during the compiling time, the pipeline found that we will checkout both the repositories, but if there wouldn't have been the checkout out of the external repository, the `/home/vsts/work/1/s` directory will be empty at this step. + +### Checkout self to its repository name + +```ymal +- checkout: self + persistCredentials: true + path: $(Build.Repository.Name) +``` + +```bash +pwd +/home/vsts/work/1/s + +ls -lart /home/vsts/work/1 +total 28 +drwxr-xr-x 2 vsts docker 4096 Apr 1 08:51 b +drwxr-xr-x 2 vsts docker 4096 Apr 1 08:51 a +drwxr-xr-x 2 vsts docker 4096 Apr 1 08:51 TestResults +drwxr-xr-x 7 vsts root 4096 Apr 1 08:51 .. +drwxr-xr-x 3 vsts docker 4096 Apr 1 08:51 s +drwxr-xr-x 7 vsts docker 4096 Apr 1 08:51 . 
+drwxr-xr-x 4 vsts docker 4096 Apr 1 08:51 cicd + +ls -lart /home/vsts/work/1/s +total 12 +drwxr-xr-x 2 vsts docker 4096 Apr 1 08:51 another_repo +drwxr-xr-x 3 vsts docker 4096 Apr 1 08:51 . +drwxr-xr-x 7 vsts docker 4096 Apr 1 08:51 .. +``` + +At this point, the following variables having following values: + +| Predefined variable name | Value | When | +|--------------------------------|-------------------------------|------------------------------------------| +| Pipeline.Workspace | /home/vsts/work/1 | After checking out self to its repo name | +| Agent.BuildDirectory | /home/vsts/work/1 | After checking out self to its repo name | +| Build.ArtifactStagingDirectory | /home/vsts/work/1/a | After checking out self to its repo name | +| Build.StagingDirectory | /home/vsts/work/1/a | After checking out self to its repo name | +| Build.BinariesDirectory | /home/vsts/work/1/b | After checking out self to its repo name | +| System.DefaultWorkingDirectory | /home/vsts/work/1/s | After checking out self to its repo name | +| Build.SourcesDirectory | /home/vsts/work/1/s | After checking out self to its repo name | +| Build.Repository.LocalPath | **/home/vsts/work/1/cicd** | After checking out self to its repo name | +| Common.TestResultsDirectory | /home/vsts/work/1/TestResults | After checking out self to its repo name | +| PWD | /home/vsts/work/1/s | After checking out self to its repo name | + +### Checkout another repository to its repository name + +```bash +- checkout: another_repo + path: another_repo +``` + +```bash +pwd +/home/vsts/work/1/s + +ls -lart /home/vsts/work/1 +total 32 +drwxr-xr-x 2 vsts docker 4096 Apr 3 12:52 b +drwxr-xr-x 2 vsts docker 4096 Apr 3 12:52 a +drwxr-xr-x 2 vsts docker 4096 Apr 3 12:52 TestResults +drwxr-xr-x 7 vsts root 4096 Apr 3 12:52 .. +drwxr-xr-x 4 vsts docker 4096 Apr 3 12:52 cicd +drwxr-xr-x 2 vsts docker 4096 Apr 3 12:52 s +drwxr-xr-x 8 vsts docker 4096 Apr 3 12:52 . 
+drwxr-xr-x 4 vsts docker 4096 Apr 3 12:52 another_repo + +ls -lart /home/vsts/work/1/s +total 8 +drwxr-xr-x 2 vsts docker 4096 Apr 3 12:52 . +drwxr-xr-x 8 vsts docker 4096 Apr 3 12:52 .. +``` + +!!! warning + + At this point, nothing exists anymore in the `/home/vsts/work/1/s` folder, remember there was the folder `another_repo` in the previous step. The checkout step moved `/home/vsts/work/1/s/another_repo` to `/home/vsts/work/1/another_repo`. + +At this point, the following variables having following values: + +| Predefined variable name | Value | When | +|--------------------------------|-------------------------------|--------------------------------------------------| +| Pipeline.Workspace | /home/vsts/work/1 | After checking out another_repo to its repo name | +| Agent.BuildDirectory | /home/vsts/work/1 | After checking out another_repo to its repo name | +| Build.ArtifactStagingDirectory | /home/vsts/work/1/a | After checking out another_repo to its repo name | +| Build.StagingDirectory | /home/vsts/work/1/a | After checking out another_repo to its repo name | +| Build.BinariesDirectory | /home/vsts/work/1/b | After checking out another_repo to its repo name | +| System.DefaultWorkingDirectory | /home/vsts/work/1/s | After checking out another_repo to its repo name | +| Build.SourcesDirectory | /home/vsts/work/1/s | After checking out another_repo to its repo name | +| Build.Repository.LocalPath | **/home/vsts/work/1/cicd** | After checking out another_repo to its repo name | +| Common.TestResultsDirectory | /home/vsts/work/1/TestResults | After checking out another_repo to its repo name | +| PWD | /home/vsts/work/1/s | After checking out another_repo to its repo name | + +### Move self to System.DefaultWorkingDirectory + +Once we have multi-checkout repositories in a pipeline, the source code of the self (primary) repository won't be saved in `/home/vsts/work/1/s`, where is pointed by the `System.DefaultWorkingDirectory` variable, but 
`System.DefaultWorkingDirectory` is the default working directory of the script task, we can add `workingDirectory:` parameter to the script task to change the path, but if we have many script tasks, and even they're declared in some shared templates, it would be difficult to change it. So we need to manually move the source repository content back to `/home/vsts/work/1/s`: + +```bash +- script: | + cp "$BUILD_REPOSITORY_LOCALPATH/." "$SYSTEM_DEFAULTWORKINGDIRECTORY" -r + displayName: Copy $(Build.Repository.Name) content to default workding directroy +``` + +```bash +pwd +/home/vsts/work/1/s + +ls -lart /home/vsts/work/1 +total 32 +drwxr-xr-x 2 vsts docker 4096 Apr 1 08:51 b +drwxr-xr-x 2 vsts docker 4096 Apr 1 08:51 a +drwxr-xr-x 2 vsts docker 4096 Apr 1 08:51 TestResults +drwxr-xr-x 7 vsts root 4096 Apr 1 08:51 .. +drwxr-xr-x 4 vsts docker 4096 Apr 1 08:51 cicd +drwxr-xr-x 4 vsts docker 4096 Apr 1 08:51 s +drwxr-xr-x 8 vsts docker 4096 Apr 1 08:51 . +drwxr-xr-x 4 vsts docker 4096 Apr 1 08:51 another_repo + +ls -lart /home/vsts/work/1/s +total 20 +-rw-r--r-- 1 vsts docker 0 Apr 1 08:51 repo_cicd.md +-rw-r--r-- 1 vsts docker 985 Apr 1 08:51 README.md +drwxr-xr-x 8 vsts docker 4096 Apr 1 08:51 .git +drwxr-xr-x 3 vsts docker 4096 Apr 1 08:51 .azure-pipelines +drwxr-xr-x 4 vsts docker 4096 Apr 1 08:51 . +drwxr-xr-x 8 vsts docker 4096 Apr 1 08:51 .. + +ls -lart +total 20 +-rw-r--r-- 1 vsts docker 0 Apr 1 08:51 repo_cicd.md +-rw-r--r-- 1 vsts docker 985 Apr 1 08:51 README.md +drwxr-xr-x 8 vsts docker 4096 Apr 1 08:51 .git +drwxr-xr-x 3 vsts docker 4096 Apr 1 08:51 .azure-pipelines +drwxr-xr-x 4 vsts docker 4096 Apr 1 08:51 . +drwxr-xr-x 8 vsts docker 4096 Apr 1 08:51 .. 
+``` + +## Single self checkout with default path + +```yml +resources: + repositories: + - repository: another_repo + type: git + name: AzureDevOpsProjectName/another_repo + +steps: + - checkout: self + persistCredentials: true +``` + +### Before checkout + +```bash +pwd +/home/vsts/work/1/s + +ls -lart /home/vsts/work/1 +total 24 +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:14 s +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:14 b +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:14 a +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:14 TestResults +drwxr-xr-x 6 vsts docker 4096 Apr 3 21:14 . +drwxr-xr-x 7 vsts root 4096 Apr 3 21:14 .. + +ls -lart /home/vsts/work/1/s +total 8 +drwxr-xr-x 6 vsts docker 4096 Apr 3 21:14 .. +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:14 . +``` + +| Predefined variable name | Value | When | +|--------------------------------|---------------------|-----------------| +| System.DefaultWorkingDirectory | /home/vsts/work/1/s | before checkout | +| Build.SourcesDirectory | /home/vsts/work/1/s | before checkout | +| Build.Repository.LocalPath | /home/vsts/work/1/s | before checkout | + +### After checkout + +```bash +pwd +/home/vsts/work/1/s + +ls -lart /home/vsts/work/1 +total 24 +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:14 b +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:14 a +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:14 TestResults +drwxr-xr-x 7 vsts root 4096 Apr 3 21:14 .. +drwxr-xr-x 6 vsts docker 4096 Apr 3 21:14 . +drwxr-xr-x 4 vsts docker 4096 Apr 3 21:14 s + +ls -lart /home/vsts/work/1/s +total 20 +drwxr-xr-x 6 vsts docker 4096 Apr 3 21:14 .. +-rw-r--r-- 1 vsts docker 0 Apr 3 21:14 repo_cicd.md +-rw-r--r-- 1 vsts docker 985 Apr 3 21:14 README.md +drwxr-xr-x 3 vsts docker 4096 Apr 3 21:14 .azure-pipelines +drwxr-xr-x 4 vsts docker 4096 Apr 3 21:14 . 
+drwxr-xr-x 8 vsts docker 4096 Apr 3 21:14 .git +``` + +| Predefined variable name | Value | When | +|--------------------------------|---------------------|----------------| +| System.DefaultWorkingDirectory | /home/vsts/work/1/s | after checkout | +| Build.SourcesDirectory | /home/vsts/work/1/s | after checkout | +| Build.Repository.LocalPath | /home/vsts/work/1/s | after checkout | + +## Single self checkout with custom path + +```yml +resources: + repositories: + - repository: another_repo + type: git + name: AzureDevOpsProjectName/another_repo + +steps: + - checkout: self + persistCredentials: true + path: $(Build.Repository.Name) +``` + +### Before checkout + +```bash +pwd +/home/vsts/work/1/s + +ls -lart /home/vsts/work/1 +total 24 +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:10 s +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:10 b +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:10 a +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:10 TestResults +drwxr-xr-x 6 vsts docker 4096 Apr 3 21:10 . +drwxr-xr-x 7 vsts root 4096 Apr 3 21:10 .. + +ls -lart /home/vsts/work/1/s +total 8 +drwxr-xr-x 6 vsts docker 4096 Apr 3 21:10 .. +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:10 . +``` + +| Predefined variable name | Value | When | +|--------------------------------|---------------------|-----------------| +| System.DefaultWorkingDirectory | /home/vsts/work/1/s | before checkout | +| Build.SourcesDirectory | /home/vsts/work/1/s | before checkout | +| Build.Repository.LocalPath | /home/vsts/work/1/s | before checkout | + +### After checkout + +```bash +pwd +/home/vsts/work/1/cicd + +ls -lart /home/vsts/work/1 +total 24 +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:10 b +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:10 a +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:10 TestResults +drwxr-xr-x 7 vsts root 4096 Apr 3 21:10 .. +drwxr-xr-x 6 vsts docker 4096 Apr 3 21:10 . 
+drwxr-xr-x 4 vsts docker 4096 Apr 3 21:10 cicd + +ls -lart /home/vsts/work/1/s +ls: cannot access '/home/vsts/work/1/s': No such file or directory +``` + +| Predefined variable name | Value | When | +|--------------------------------|------------------------|----------------| +| System.DefaultWorkingDirectory | /home/vsts/work/1/cicd | after checkout | +| Build.SourcesDirectory | /home/vsts/work/1/cicd | after checkout | +| Build.Repository.LocalPath | /home/vsts/work/1/cicd | after checkout | + +## No self checkout but single external checkout with default path + +```yml +resources: + repositories: + - repository: another_repo + type: git + name: AzureDevOpsProjectName/another_repo + +steps: + - checkout: another_repo +``` + +### Before checkout + +```bash +pwd +/home/vsts/work/1/s + +ls -lart /home/vsts/work/1 +total 24 +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:25 s +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:25 b +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:25 a +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:25 TestResults +drwxr-xr-x 6 vsts docker 4096 Apr 3 21:25 . +drwxr-xr-x 7 vsts root 4096 Apr 3 21:25 .. + +ls -lart /home/vsts/work/1/s +total 8 +drwxr-xr-x 6 vsts docker 4096 Apr 3 21:25 .. +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:25 . +``` + +| Predefined variable name | Value | When | +|--------------------------------|---------------------|-----------------| +| System.DefaultWorkingDirectory | /home/vsts/work/1/s | before checkout | +| Build.SourcesDirectory | /home/vsts/work/1/s | before checkout | +| Build.Repository.LocalPath | /home/vsts/work/1/s | before checkout | + +### After checkout + +```bash +pwd +/home/vsts/work/1/s + +ls -lart /home/vsts/work/1 +total 24 +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:25 b +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:25 a +drwxr-xr-x 2 vsts docker 4096 Apr 3 21:25 TestResults +drwxr-xr-x 7 vsts root 4096 Apr 3 21:25 .. +drwxr-xr-x 6 vsts docker 4096 Apr 3 21:25 . 
+drwxr-xr-x 4 vsts docker 4096 Apr 3 21:25 s + +ls -lart /home/vsts/work/1/s +total 40 +drwxr-xr-x 6 vsts docker 4096 Apr 3 21:25 .. +-rw-r--r-- 1 vsts docker 947 Apr 3 21:25 README.md +drwxr-xr-x 8 vsts docker 4096 Apr 3 21:25 .git +drwxr-xr-x 5 vsts docker 4096 Apr 3 21:25 repo_another_repo +drwxr-xr-x 4 vsts docker 4096 Apr 3 21:25 . +``` + +| Predefined variable name | Value | When | +|--------------------------------|---------------------|----------------| +| System.DefaultWorkingDirectory | /home/vsts/work/1/s | after checkout | +| Build.SourcesDirectory | /home/vsts/work/1/s | after checkout | +| Build.Repository.LocalPath | /home/vsts/work/1/s | after checkout | + +## No self checkout but single external checkout with custom path + +Please see following pipeline example, we define an external repository called `another_repo`, but we don't checkout the self repository, and we only checkout this external repository. + +```yml +resources: + repositories: + - repository: another_repo + type: git + name: AzureDevOpsProjectName/another_repo + +steps: + - checkout: another_repo + path: another_repo +``` + +### Before checkout + +```bash +pwd +/home/vsts/work/1/s + +ls -lart /home/vsts/work/1 +total 24 +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:52 s +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:52 b +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:52 a +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:52 TestResults +drwxr-xr-x 6 vsts docker 4096 Apr 3 20:52 . +drwxr-xr-x 7 vsts root 4096 Apr 3 20:52 .. + +ls -lart /home/vsts/work/1/s +total 8 +drwxr-xr-x 6 vsts docker 4096 Apr 3 20:52 .. +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:52 . 
+``` + +| Predefined variable name | Value | When | +|--------------------------------|---------------------|-----------------| +| System.DefaultWorkingDirectory | /home/vsts/work/1/s | before checkout | +| Build.SourcesDirectory | /home/vsts/work/1/s | before checkout | +| Build.Repository.LocalPath | /home/vsts/work/1/s | before checkout | + +### After checkout + +```bash +pwd +/home/vsts/work/1/another_repo + +ls -lart /home/vsts/work/1 +total 24 +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:52 b +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:52 a +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:52 TestResults +drwxr-xr-x 7 vsts root 4096 Apr 3 20:52 .. +drwxr-xr-x 6 vsts docker 4096 Apr 3 20:53 . +drwxr-xr-x 4 vsts docker 4096 Apr 3 20:53 another_repo + +ls -lart /home/vsts/work/1/s +ls: cannot access '/home/vsts/work/1/s': No such file or directory +``` + +| Predefined variable name | Value | When | +|--------------------------------|-------------------------------------------------|----------------| +| System.DefaultWorkingDirectory | /home/vsts/work/1/another_repo | after checkout | +| Build.SourcesDirectory | /home/vsts/work/1/another_repo | after checkout | +| Build.Repository.LocalPath | /home/vsts/work/1/another_repo | after checkout | + +## No self checkout but multiple external checkout + +```yml +resources: + repositories: + - repository: another_repo1 + type: git + name: AzureDevOpsProjectName/another_repo1 + - repository: another_repo2 + type: git + name: AzureDevOpsProjectName/another_repo2 + +steps: + - checkout: another_repo1 + path: another_repo1 + - checkout: another_repo2 + path: another_repo2 +``` + +### Before checkout + +```bash +pwd +/home/vsts/work/1/s + +ls -lart /home/vsts/work/1 +total 24 +drwxr-xr-x 5 vsts docker 4096 Apr 3 20:59 s +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:59 b +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:59 a +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:59 TestResults +drwxr-xr-x 6 vsts docker 4096 Apr 3 20:59 . 
+drwxr-xr-x 7 vsts root 4096 Apr 3 20:59 .. + +ls -lart /home/vsts/work/1/s +total 20 +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:59 cicd +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:59 another_repo1 +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:59 another_repo2 +drwxr-xr-x 6 vsts docker 4096 Apr 3 20:59 .. +drwxr-xr-x 5 vsts docker 4096 Apr 3 20:59 . +``` + +| Predefined variable name | Value | When | +|--------------------------------|---------------------|-----------------| +| System.DefaultWorkingDirectory | /home/vsts/work/1/s | before checkout | +| Build.SourcesDirectory | /home/vsts/work/1/s | before checkout | +| Build.Repository.LocalPath | /home/vsts/work/1/s | before checkout | + +### After checkout + +```bash +pwd +/home/vsts/work/1/s + +ls -lart /home/vsts/work/1 +total 32 +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:59 b +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:59 a +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:59 TestResults +drwxr-xr-x 7 vsts root 4096 Apr 3 20:59 .. +drwxr-xr-x 4 vsts docker 4096 Apr 3 20:59 another_repo1 +drwxr-xr-x 3 vsts docker 4096 Apr 3 20:59 s +drwxr-xr-x 8 vsts docker 4096 Apr 3 20:59 . +drwxr-xr-x 4 vsts docker 4096 Apr 3 20:59 another_repo2 + +ls -lart /home/vsts/work/1/s +total 12 +drwxr-xr-x 2 vsts docker 4096 Apr 3 20:59 cicd +drwxr-xr-x 3 vsts docker 4096 Apr 3 20:59 . +drwxr-xr-x 8 vsts docker 4096 Apr 3 20:59 .. 
| Predefined variable name       | Value               | When           |
+|--------------------------------|---------------------|----------------|
+| System.DefaultWorkingDirectory | /home/vsts/work/1/s | after checkout |
+| Build.SourcesDirectory         | /home/vsts/work/1/s | after checkout |
+| Build.Repository.LocalPath     | /home/vsts/work/1/s | after checkout |
This project is still in development, and is a part of Apache Spark, not Databricks specific. + +A very important point to be taken into account is that if we plan to deploy production ready Databricks workflows, it's recommended to use `dbx`. currently it's not official supported by Databricks (version number starts with 0), but it's good enough to use, I've already used it since several months. And as it's a deployment tool, even if it bugs, it will be much less dangerous for production. + +Just a quick helper information of `dbx`: + +```bash +$ dbx --version +[dbx][2022-10-15 22:43:24.265] 🧱Databricks eXtensions aka dbx, version ~> 0.7.6 + +$ dbx --help + + Usage: dbx [OPTIONS] COMMAND [ARGS]... + + 🧱Databricks eXtensions aka dbx. Please find the main docs page here. + +╭─ Options ──────────────────────────────────────────────────────────────────────────╮ +│ --version │ +│ --install-completion Install completion for the current shell. │ +│ --show-completion Show completion for the current shell, to copy it or │ +│ customize the installation. │ +│ --help Show this message and exit. │ +╰────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Commands ─────────────────────────────────────────────────────────────────────────╮ +│ configure 🔧 Configures project environment in the current folder. │ +│ deploy 📦 Deploy project to artifact storage. │ +│ destroy 🚮 Delete defined workflows and relevant assets. │ +│ execute 🔥 Executes chosen workload on the interactive cluster. │ +│ init 💎 Generates new project from the template. │ +│ launch 🚀 Launch the workflow on a job cluster. │ +│ sync 🔄 Sync local files to Databricks and watch for changes. │ +╰────────────────────────────────────────────────────────────────────────────────────╯ +``` + +## Using Databricks Connect outside a container + +Just follow the [official guide](https://docs.databricks.com/dev-tools/databricks-connect.html). 
+ +## Using Databricks Connect inside a container + +VSCode has a very nice feature that enable us to [develop inside a container](https://code.visualstudio.com/docs/remote/containers). As Databricks Connect needs some setup, we can leverage this feature to prepare a container that having everything pre-configured. When we need to do a live debug, just connect VSCode to the container, then set some breakpoints and start the debug. + +We need following folder and files to use VSCode remote container: + +```bash +.databricks-connect.template +.devcontainer/ +├── Dockerfile +└── devcontainer.json +databricks_demo_job.py +``` + +And the content of each files: + +### .databricks-connect.template + +```json +// to find the config values: https://docs.databricks.com/dev-tools/databricks-connect.html#step-2-configure-connection-properties + +// inside the container, we can use `databricks-connect configure` to create this file, but it takes time, that's why we pre-created this file before container build. + +{ + "host": "https://aaa.azuredatabricks.net/", + "token": "replacetoken", + "cluster_id": "abc", + "org_id": "111111", + "port": "15001" +} +``` + +### Dockerfile + +My test is run in a Databricks cluster with the [runtime 10.4](https://docs.databricks.com/dev-tools/databricks-connect.html#requirements), which is bound to `Python 3.8`. At the time of writing, Databricks only releases a beta version for the runtime 10.4: `databricks-connect==10.4.0b0`. In the future, as per the official doc, it would be better to use the convention `databricks-connect==10.4.*`. + +The official doc says also that only `OpenJDK 8 JRE` is supported by the Databricks Connect client. But `default-jre` installed in the Dockerfile is for `python:3.8`, which is bound to `3.8-bullseye`, which means the JRE version is [v11](https://packages.debian.org/bullseye/default-jre). If we encounter some bugs when using Databricks Connect, we might need to install `OpenJDK 8 JRE`. 
+ +`ENV SPARK_HOME` is tested from my Python:3.8 image, once in the container, run the command `databricks-connect get-spark-home` to check if it's the same. If not, update the Dockerfile. + +```dockerfile +# https://github.com/microsoft/vscode-dev-containers/blob/main/containers/python-3/.devcontainer/Dockerfile + +# [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster + +ARG VARIANT="3.8" +FROM mcr.microsoft.com/vscode/devcontainers/python:${VARIANT} + +ARG DEV_DATABRICKS_TOKEN + +COPY .databricks-connect.template /home/vscode/.databricks-connect + +RUN && sudo apt update \ + && sudo apt-get install -y default-jre \ + && pip install databricks-connect==10.4.0b0 \ + && pip install -U pip \ + && sed -i "s/replacetoken/${DEV_DATABRICKS_TOKEN}/g" /home/vscode/.databricks-connect + +ENV SPARK_HOME /usr/local/lib/python3.8/site-packages/pyspark +``` + +### devcontainer.json + +```json +// Config options: https://aka.ms/devcontainer.json + +// File example: https://github.com/microsoft/vscode-dev-containers/blob/main/containers/python-3/.devcontainer/devcontainer.json +{ + + "name": "Python 3", + "build": { + "dockerfile": "Dockerfile", + "context": "..", + "args": { + // Update 'VARIANT' to pick a Python version: 3, 3.10, 3.9, 3.8, 3.7, 3.6 + // Append -bullseye or -buster to pin to an OS version. + // Use -bullseye variants on local on arm64/Apple Silicon. + "VARIANT": "3.10-bullseye", + "DEV_DATABRICKS_TOKEN": "${localEnv:DEV_DATABRICKS_TOKEN}" + } + }, + + // Configure tool-specific properties. + "customizations": { + // Configure properties specific to VS Code. + "vscode": { + // Set *default* container specific settings.json values on container create. 
+ "settings": { + "python.defaultInterpreterPath": "/usr/local/bin/python", + "python.linting.enabled": true, + "python.linting.pylintEnabled": true, + "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8", + "python.formatting.blackPath": "/usr/local/py-utils/bin/black", + "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf", + "python.linting.banditPath": "/usr/local/py-utils/bin/bandit", + "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8", + "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy", + "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle", + "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle", + "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint" + }, + + // Add the IDs of extensions you want installed when the container is created. + "extensions": ["ms-python.python", "ms-python.vscode-pylance"] + } + }, + + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [], + + // Use 'postCreateCommand' to run commands after the container is created. + // "postCreateCommand": "pip3 install --user -r requirements.txt", + + // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. + "remoteUser": "vscode" +} +``` + +### databricks_demo_job.py + +```python +# example taken from: https://docs.databricks.com/dev-tools/databricks-connect.html#access-dbutils + +from pyspark.sql import SparkSession +from pyspark.dbutils import DBUtils + +spark = SparkSession.builder.getOrCreate() + +dbutils = DBUtils(spark) +print(dbutils.fs.ls("dbfs:/")) +print(dbutils.secrets.listScopes()) +``` + +### env var DEV_DATABRICKS_TOKEN + +As you can see, in the file `.databricks-connect.template`, there's a line `"token": "replacetoken",`. +In fact, during the build of the Dockerfile, it will replace the string `replacetoken` by the value of the env var `DEV_DATABRICKS_TOKEN`. 
So we need to create this env var in advance. + +### Test + +1. From VSCode, type `F1`, choose `Remote-Containers: Reopen in Container`, VSCode will open a new instance. If you check the lower left corner of VSCode, you'll see `Dev Container: Python 3`. +2. Run `cat ~/.databricks-connect`, you should see the correct config. +3. Run `databricks-connect test`, it should not raise any error, and might have the phrase `* All tests passed.` in the end. If the cluster is not started yet, it could take some time during this step. +4. Set a breakpoint in the file `databricks_demo_job.py`, type `F5`, have fun. diff --git a/docs/posts/2022/2022-07-03-azure-pipeline-conditions.md b/docs/posts/2022/2022-07-03-azure-pipeline-conditions.md new file mode 100644 index 00000000..e4786a72 --- /dev/null +++ b/docs/posts/2022/2022-07-03-azure-pipeline-conditions.md @@ -0,0 +1,49 @@ +--- +authors: +- copdips +categories: +- azure +- cicd +comments: true +date: + created: 2022-07-03 +description: '' +--- + +# Azure pipeline conditions + +Azure pipeline has two kinds of conditions: + +1. With keyword [`condition`](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/conditions?view=azure-devops&tabs=yaml) +2. With jinja like format [`${{if elseif else}}`](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#conditional-insertion) + +In both syntax, we have use parameters and variables, but there's a big difference between them which makes DevOps frustrated. 
+ +## Conditions with keyword ${{if elseif else}} + +With `${{if elseif else}}` condition, the using parameters and variables' values are calculated during the `compilation/parsing/loading time`, which means: + +- Even if you define a variable before the `${{if elseif else}}` block, but the condition is always evaluated to `false` if you use this variable in the condition, as it considers the value doesn't exist yet during the compilation, so if you have a `- ${{ else }}` block, it will always be executed. +- In a `template`, unless the parameters' values can be calculated from the loading time, otherwise they're always evaluated to its default value, if the default value is not defined, Azure pipeline will not raise any error, the condition check just returns `always false`, so the pipeline will never run into it except for the `- ${{ else }}` block. +- But in a `root pipeline` out of template, it's the [real parameter value](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/runtime-parameters?view=azure-devops&tabs=script#use-parameters-to-determine-what-steps-run) being evaluated in the `${{if elseif else}}` block. +- Some predefined variables cannot be used in `${{if elseif else}}` neither, check the column `Available in templates?` in the [Use predefined variables doc](https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml), which means these values are always evaluated to `null`. +- When evaluated to `false`, the tasks, scripts, etc. wont even be shown as skipped in the Azure pipelines UI, they're just `not shown`. +- The official doc calls the parameters as [runtime parameters](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/runtime-parameters?view=azure-devops&tabs=script), but in fact they're runtime only when they're not in a template. 
+ +## Conditions with keyword condition + +The [official doc](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/conditions?view=azure-devops&tabs=yaml) puts the `condition` keyword format in the `Jobs and stages` level, but in fact, we can also use it in `tasks` or `scripts` level. + +- Same as to `${{if elseif else}}` condition, if you use `parameters` in `condition` keyword conditions, it's value is calculated in the compilation time, so be careful with their usages. +- `Variables` in the conditions are evaluated `in real time`, this is the only point that make DevOps happy. +- If you really want to evaluate `in real time the parameters`, the workaround is to add a script task in advance that [define some variables](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/set-variables-scripts?view=azure-devops&tabs=bash) taking the values of parameters, and then use these variables in the conditions with `condition` keyword. +- When evaluated to `false`, the tasks, scripts, etc. bound by the conditions will be `shown as skipped` in the Azure pipelines UI. +- As `condition` keyword is bound to a single task, script, jobs, stages, etc., if you want to for example run 3 tasks under the same condition, you need to add the same condition to the 3 tasks respectively, whereas with `${{if elseif else}}`, we can group the 3 tasks under the same condition, but as explained above, the values of compared parameters or variables referenced in the `${{if elseif else}}` format conditions are evaluated during the compilation/loading time, so `${{if elseif else}}` will not work for all the use cases, this is the biggest pity of Azure Pipeline from my point of view. +- We can add `condition` to `jobs`, and inside the jobs, we can have multiple tasks, this could be a workaround of above pity if we do not want add condition to each task with the same condition. 
+ +## A table to sum up + +| Inputs \ Conditions | ${{if elseif else}} keyword | condition keyword | +| ------------------- | -------------------------------- | ------------------------------------------------ | +| parameter | compilation/parsing/loading time | compilation/parsing/loading time | +| variable | compilation/parsing/loading time | real time (except for some predefined variables) | diff --git a/docs/posts/2022/2022-07-28-databricks-job-context.md b/docs/posts/2022/2022-07-28-databricks-job-context.md new file mode 100644 index 00000000..896e88d5 --- /dev/null +++ b/docs/posts/2022/2022-07-28-databricks-job-context.md @@ -0,0 +1,100 @@ +--- +authors: +- copdips +categories: +- databricks +- azure +comments: true +date: + created: 2022-07-28 +description: Giving an example of Databricks job/task json context values +--- + +# Databricks job/task context + +Suppose we're running following job/task in a Azure Databricks workspace: + +```yaml +jobId: "1111" +jobRunId: "2222" +taskRunId: "3333" +jobName: "ths job name" +taskName: "first-task" +databricksWorkspaceUrl: https://adb-4444444444.123.azuredatabricks.net/ +``` + +Run below command in a Databricks job (task precisely): + +```bash +dbutils.notebook.entry_point.getDbutils().notebook().getContext().toJson() +``` + +We will get following json: + +```json +{ + "rootRunId": null, + "currentRunId": null, + "jobGroup": "7777777777777777777_8888888888888888888_job-1111-run-3333-action-9999999999999999", + "tags": { + "jobId": "1111", # job id + "jobName": "ths job name", + "jobClusterKey": "ths job name", + "multitaskParentRunId": "2222", # this is the job run id + "taskKey": "first-task", # task name + "jobRunOriginalAttempt": "3333", # first task run id + "jobRunAttempt": "3333", + "idInJob": "3333", + "runId": "3333", # current task run id, could be different to `jobRunOriginalAttempt` if retry on failure + "jobOwnerId": "01010101010101", + "opId": "ServerBackend-5fe4478cdfb206ba", + "jobFallbackOndemand": 
"true", + "opTarget": "com.databricks.backend.common.rpc.InternalDriverBackendMessages$StartRepl", + "taskDependencies": "[]", + "eventName": "runExecution", + "serverBackendName": "com.databricks.backend.daemon.driver.DriverCorral", + "projectName": "driver", + "jobClusterNumContainers": "1", + "jobMiscMessage": "In run", + "jobTriggerTime": "1659015591689", + "buildHash": "a2e5769182f120d638a865bc99430452da7670de", + "effectiveSparkVersion": "", + "sparkVersion": "", + "userProvidedSparkVersion": "10.4.x-cpu-ml-scala2.12", + "jobTriggerSource": "DbScheduler", + "host": "1.2.3.4", + "clusterId": "0728-133953-i3676wgl", + "hostName": "0728-133953-i3676wgl-1-2-3-4", + "jettyRpcJettyVersion": "9", + "orgId": "4444444444", # the id in the Databricks workspace url https://adb-{orgId}.{randomNumber}.azuredatabricks.net/ + "jobType": "NORMAL", + "jobTimeoutSec": "0", + "maxConcurrentRuns": "10", + "rootOpId": "ServiceMain-1ffca09fcc660002", + "jobClusterType": "job_cluster", + "executorName": "ActiveRunMonitor-job-run-pool", + "jobUseSpot": "true", + "jobTerminalState": "Running", + "userId": "01010101010101", # user id in Databricks, same as jobOwnerId in this example as the job is running by the job owner + "jobTriggerId": "0", + "opType": "ServerBackend", + "jobTriggerType": "manual", + "jobTaskType": "python", + "isGitRun": "false", + "user": "00000000-0000-0000-0000-000000000000", # user name or sp id, or etc. + "parentOpId": "RPCClient-1ffca09fcc6602f4", + "jettyRpcType": "InternalDriverBackendMessages$DriverBackendRequest" + }, + "extraContext": { + "notebook_path": "dbfs:/dbx/my_repo_unique_name/f80372effd494fd79d3831d69fb5d3cd/artifacts/repo_name/tasks/first/entrypoint.py", + "api_url": "https://westeurope.azuredatabricks.net", # ! This is not the Databricks workspace URL where the job is running, I find nowhere having the full Databricks workspace URL, `orgId` is not enough, as there's a random number right after it in the URL. 
+ "api_token": "[REDACTED]", + "non_uc_api_token": "" + }, + "credentialKeys": [ + "adls_aad_token", + "adls_gen2_aad_token", + "synapse_aad_token" + ] +} +``` diff --git a/docs/posts/2022/2022-08-14-azure-pipeline-jobs.md b/docs/posts/2022/2022-08-14-azure-pipeline-jobs.md new file mode 100644 index 00000000..db089d43 --- /dev/null +++ b/docs/posts/2022/2022-08-14-azure-pipeline-jobs.md @@ -0,0 +1,96 @@ +--- +authors: +- copdips +categories: +- azure +- cicd +comments: true +date: + created: 2022-08-14 +description: '' +--- + +# Azure pipeline jobs + +## Traditional jobs vs deployment jobs + +- [traditional jobs](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/phases?view=azure-devops&tabs=yaml) run in parallel, +- [deployment jobs](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/deployment-jobs?view=azure-devops) run in sequence, save the deployment history to a environment and a resource, and can also be applied with deployment strategy (runOnce, rolling, and the canary) + +## Deployment jobs + +### Tracking deployment history + +As per example given [here](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/deployment-jobs?view=azure-devops#runonce-deployment-strategy-1): we can use `RunOnce deployment strategy` to create some environments with empty resources and use that as an abstract shell to record deployment history, as the deployment history is across pipelines, down to a specific resource and status of the deployments for auditing. + +### Sharing output variables + +The syntax is [here](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/deployment-jobs?view=azure-devops#support-for-output-variables). + +Be careful that we must provide the `` in the `outputs` part. 
In the below example we can see that the deployement `A` is specified twice: `$[ dependencies.A.outputs['A.setvarStep.myOutputVar'] ]` + +```yaml +# Set an output variable in a lifecycle hook of a deployment job executing runOnce strategy. +- deployment: A + pool: + vmImage: 'ubuntu-latest' + environment: staging + strategy: + runOnce: + deploy: + steps: + - bash: echo "##vso[task.setvariable variable=myOutputVar;isOutput=true]this is the deployment variable value" + name: setvarStep + - bash: echo $(setvarStep.myOutputVar) + name: echovar + +# Map the variable from the job. +- job: B + dependsOn: A + pool: + vmImage: 'ubuntu-latest' + variables: + myVarFromDeploymentJob: $[ dependencies.A.outputs['A.setvarStep.myOutputVar'] ] + steps: + - script: "echo $(myVarFromDeploymentJob)" + name: echovar +``` + +When you output a variable from a deployment job, referencing it from the next job uses different syntax depending on if you want to set a variable or use it as a condition for the stage. + +```yaml +stages: +- stage: StageA + jobs: + - job: A1 + steps: + - pwsh: echo "##vso[task.setvariable variable=RunStageB;isOutput=true]true" + name: setvarStep + - bash: echo $(System.JobName) + +- stage: StageB + dependsOn: + - StageA + + # when used in a condition, job name `A1` is included in variable path. + condition: eq(dependencies.StageA.outputs['A1.setvarStep.RunStageB'], 'true') + + # when use to set a variable, jon name `A1` is not included in the variable path. + variables: + myOutputVar: $[stageDependencies.StageA.A1.outputs['setvarStep.RunStageB']] + jobs: + - deployment: B1 + pool: + vmImage: 'ubuntu-latest' + environment: envB + strategy: + runOnce: + deploy: + steps: + - bash: echo $(myOutputVar) +``` + +!!! note + + Here is the doc for [defining variables](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/variables?view=azure-devops). 
+ diff --git a/docs/posts/2022/2022-09-12-azure-pipeline-system-access-token-in-shared-pipeline.md b/docs/posts/2022/2022-09-12-azure-pipeline-system-access-token-in-shared-pipeline.md new file mode 100644 index 00000000..086f14d5 --- /dev/null +++ b/docs/posts/2022/2022-09-12-azure-pipeline-system-access-token-in-shared-pipeline.md @@ -0,0 +1,52 @@ +--- +authors: +- copdips +categories: +- azure +- cicd +comments: true +date: + created: 2022-09-12 +description: '' +--- + +# Azure pipeline System.AccessToken in shared pipeline + +## Var $(System.AccessToken) + +[System.AccessToken](https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#systemaccesstoken) is a special variable that carries the security token used by the running build. If you check the doc of [job authorization scope](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/access-tokens?view=azure-devops&tabs=yaml#job-authorization-scope), you might think the var `$(System.AccessToken)` has by default the access to all the repositories in the same project where hosts the calling Azure pipeline. But unfortunately, it's only partially right. + +## Problem + +Suppose following situation: + +`ProjectA.RepoOne.PipelineOne`: The Azure DevOps repository `RepoOne` in the Azure DevOps project `ProjectA` has a pipeline `PipelineOne`, the `PipelineOne` just gets the `repositoryId` by `repositoryName`, behind the scenes, it calls the [Azure DevOps API](https://docs.microsoft.com/en-us/rest/api/azure/devops/search/repositories/get?view=azure-devops-rest-7.1). We need to provide an access token to call this API, in our case, we use the built-in `$(System.AccessToken)`. + +After the test, if we give `RepoOne` as the repository name, the pipeline works well, and returns the repositoryId of the repository `RepoOne`. But if we give another repository name (for e.g. 
`RepoTwo`), which is in the same project `ProjectA`, you will get an error something like: + +```bash +401 Client Error: Unauthorized for url: https://almsearch.dev.azure.com/... +``` + +## Root cause + +This is because although the `$(System.AccessToken)` is designed to have access to all the repositories in the same project, there's still another level of security control which blocks the API call, which is the pipeline level permission. + +## Solution + +To fix this, one of the solutions is to add the target repository as [repositories resource](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/resources?view=azure-devops&tabs=schema#define-a-repositories-resource) in the `PipelineOne` yaml file: + +```yaml +resources: + repositories: + - repository: RepoTwo + type: git + name: ProjectA/RepoTwo +``` + +When we re-run `PipelineOne`, this time the pipeline will be in pending for asking for the permission to access to the `RepoTwo` repository, we need to `manually` click on the `permit` button to grant this access, and then the pipeline will succeed as expected. + +!!! note + + The repositories resource **does not accept variables** in the `repository` and `name` values which makes the pipeline authoring a little bit sticky. We must write letter by letter the project name and repository name in string, so we need to declare as many repositories resources as the repositories in the same project on which we want to apply the `PipelineOne`. 
+ diff --git a/docs/posts/2022/2022-09-15-adding-data-files-to-python-package-with-setup-py.md b/docs/posts/2022/2022-09-15-adding-data-files-to-python-package-with-setup-py.md new file mode 100644 index 00000000..b3ba0ec8 --- /dev/null +++ b/docs/posts/2022/2022-09-15-adding-data-files-to-python-package-with-setup-py.md @@ -0,0 +1,157 @@ +--- +authors: +- copdips +categories: +- python +- package +comments: true +date: + created: 2022-09-15 +description: '' +--- + +# Adding data files to Python package with setup.py + +## setup.py vs pyproject.toml + +`pyproject.toml` is the new Python project metadata specification standard since [PEP 621](https://peps.python.org/pep-0621/). As per [PEP 517](https://www.python.org/dev/peps/pep-0517/), and as per one of the comments of this [StackOverflow thread](https://stackoverflow.com/a/62983901/5095636), in some rare cases, we might have a chicken and egg problem when using `setup.py` if it needs to import something from the package it's building. The only thing that `pyproject.toml` cannot achieve for the moment is the installation in [editable mode](https://packaging.python.org/en/latest/guides/distributing-packages-using-setuptools/#working-in-development-mode), where we must use `setup.py`. Another advantage of `setup.py` is that we can compute some variables dynamically during the build time as it's a Python file. + +Nevertheless, `setup.py` is still a widely used solid tool to build Python package. This post will discuss how to add data files (non Python files) to a Python wheel package built by `setup.py`, the source distribution files (sdist .tar.gz files, .zip for Windows) are not covered by this post. 
+ +## Adding data files + +### With parameter package_data for files inside a package + +Official doc: [https://docs.python.org/3/distutils/setupscript.html#installing-package-data](https://docs.python.org/3/distutils/setupscript.html#installing-package-data) + +`package_data` accepts wildcard, but from the given example, the data files **must exist inside a Python module folder** (coexist with file `__init__.py`), you cannot use `package_data` to include files from non module folders, for e.g. the folder `conf` where there's no `__init__.py` file inside. + +```bash +setup.py +conf/ + conf.json +src/ + mypkg/ + __init__.py + module.py + data/ + tables.dat + spoons.dat + forks.dat +``` + +```python +setup(..., + packages=['mypkg'], + package_dir={'mypkg': 'src/mypkg'}, + package_data={'mypkg': ['data/*.dat']}, + ) +``` + +### With parameter data_files for any files + +official doc: [https://docs.python.org/3/distutils/setupscript.html#installing-additional-files](https://docs.python.org/3/distutils/setupscript.html#installing-additional-files) + +!!! warning + + `distutils` is deprecated, and will be [remove in Python 3.12](https://docs.python.org/3/distutils/index.html#distributing-python-modules-legacy-version) as per [PEP 632](https://peps.python.org/pep-0632/), the migration path is to simply use [setuptools](https://setuptools.pypa.io/en/latest/deprecated/distutils-legacy.html). + +```python +setup(..., + data_files=[ + ('bitmaps', ['bm/b1.gif', 'bm/b2.gif']), + ('config', ['cfg/data.cfg']), + ({dest_folder_path_in_wheel}, [{source_file_path_relative_to_setup.py_script}]), + ], +) +``` + +From the above example, we can see that: + +1. `data_files` accepts any files from any folder, in contrast to `package_data` which accepts files inside a package folder. +2. `data_files` takes files one by one, we can not use the wildcard like * to specify a set of source files. +3. 
after build, there's a `.whl` wheel file generated, the `source_file_path_relative_to_setup` will be added to the path `{package_name}-{package_version}.data/data/{dest_folder_path_in_wheel}/{source_file_name}`, and the Python files are added to `{module_name}/{python_package_original_path}`. If you want to put the data files at the original path, you need to replace `{dest_folder_path_in_wheel}` with `../../{data_files_original_path}`, the first two `..` is just to escape two folder levels from `{package_name}-{package_version}.data/data/`. + +### With file MANIFEST.in + +From my understanding and tests, `MANIFEST.in` file is only for sdist, so out of the scope of this post which talks about bdist wheel package only. + +### Parameter zip_safe + +If you're using old-fashion egg file, to reference data files inside package, should put `zie_safe=False` during built. Otherwise, for modern Python packaging, this parameter is [obsolete](https://setuptools.pypa.io/en/latest/deprecated/zip_safe.html#understanding-the-zip-safe-flag). + +## Loading data files + +A very good sum-up can be found in this [StackOverflow thread](https://stackoverflow.com/a/58941536/5095636). + +### Loading data files packaged by package_data + +* With [importlib.resources](https://docs.python.org/3/library/importlib.html#module-importlib.resources), [importlib.metadata](https://docs.python.org/3/library/importlib.metadata.html) or their backports [importlib_resources](https://pypi.org/project/importlib_resources) [importlib_metadata](https://pypi.org/project/importlib_metadata). 
+ + ```python + # to read file from module_a/folder_b/file.json + import importlib.resources + import json + + # open_text is deprecated in Python3.11 as only support files in Python modules + # see below example how to use `importlib.resources.files` + json.load(importlib.resources.open_text("module_a.folder_b", "file.json")) + ``` + + Check this [doc](https://importlib-resources.readthedocs.io/en/latest/migration.html#migration-guide) for migration from `pkg_resources`. + +* With deprecated [pkg_resources](https://setuptools.pypa.io/en/latest/pkg_resources.html#) from setuptools of pypa.io, and some examples from [here](https://godatadriven.com/blog/a-practical-guide-to-setuptools-and-pyproject-toml/) or [here](https://dbx.readthedocs.io/en/latest/guides/python/packaging_files/#using-the-referenced-files). + + !!! warning + + [pkg_resources](https://setuptools.pypa.io/en/latest/pkg_resources.html) is deprecated due to some performance issue, and also need to install third-party setuptools for the run which should only be used during the build. + + ```python + # to read file from module_a/folder_b/file.json + import json + import pkg_resources + + json.load(pkg_resources.resource_stream("module_a", "folder_b/file.json")) + ``` + +### Loading data files packaged by data_files + +As data files packaged by `data_files` parameter could be in any folder, not necessarily inside a Python module with `__init__` file, in such case the new `importlib.resources.open_text`can not be used anymore, and indeed marked as [deprecated in Python 3.11](https://docs.python.org/3.11/library/importlib.resources.html?highlight=read_text#deprecated-functions). + +* Use stdlib `importlib.resources.files` to read file from `module_a/folder_b/file.json` + + !!! 
note + + This method can also be used to [load data files packaged by package_data](#loading-data-files-packaged-by-data_files) + + ```python + try: + # new stdlib in Python3.9 + from importlib.resources import files + except ImportError: + # third-party package, backport for Python3.9-, + # need to add importlib_resources to requirements + from importlib_resources import files + import json + + # with `data_files` in `setup.py`, + # we can specify where to put the files in the wheel package, + # so inside the module_a for example + with open(files(module_a).joinpath("folder_b/file.json")) as f: + print(json.load(f)) + ``` + +* Use deprecated third-party `pkg_resources` to read file from `module_a/folder_b/file.json` + + ```python + import json + import pkg_resources + + # use `data_files` in `setup.py`, we can specify where to put the files, + # so inside the module_a for example + json.load(pkg_resources.resource_stream("module_a", "folder_b/file.json")) + ``` + +* Use stdlib `pkgtuil.get_data` + + You can find an example in this [StackOverflow thread](https://stackoverflow.com/a/58941536/5095636). All the answers and the comments are worth reading. Be aware that `pkgutil.get_date()` could be [deprecated](https://gitlab.com/python-devs/importlib_resources/-/issues/58#note_329352693) too one day. diff --git a/docs/posts/2022/2022-09-20-databricks-cluster-access-mode.md b/docs/posts/2022/2022-09-20-databricks-cluster-access-mode.md new file mode 100644 index 00000000..91aead50 --- /dev/null +++ b/docs/posts/2022/2022-09-20-databricks-cluster-access-mode.md @@ -0,0 +1,228 @@ +--- +authors: +- copdips +categories: +- azure +- databricks +- spark +comments: true +date: + created: 2022-09-20 +description: '' +--- + +# Databricks cluster access mode + +## What is cluster access mode + +Just a copy from [Azure Databricks official doc](https://learn.microsoft.com/en-us/azure/databricks/data-governance/unity-catalog/compute#--what-is-cluster-access-mode): + +!!! 
note + + [Amazon Databricks official doc](https://docs.databricks.com/clusters/cluster-ui-preview.html#what-is-cluster-access-mode) has less info on access mode. + +| Access Mode | Visible to user | UC Support | Supported Languages | Notes | +| --------------------- | ---------------------------------------- | ---------- | --------------------- | ---------------------------------------------------------------------------------------------------------------------------- | +| `Single User` | Always | Yes | Python, SQL, Scala, R | Can be assigned to and used by a single user only. Dynamic views are not supported. Credential passthrough is not supported. | +| `Shared` | Always (Premium plan required) | Yes | Python, SQL | Init scripts, third-party libraries, and JARS are not supported. Credential passthrough is not supported. | +| `No Isolation Shared` | Hidden (Enforce User Isolation required) | No | Python, SQL, Scala, R | Admin console configuration required to enforce user isolation | +| `Custom` | Hidden (For all new clusters) | No | Python, SQL, Scala, R | This option is shown only if you have existing clusters without a specified access mode. | + +`Single User` mode is easy to understand, the cluster is reserved to a single user, other user cannot use it. + +`Custom` mode is often seen in job cluster, which means cluster created by a job running in a cluster pool for example, because when creating a cluster pool, there's no option for access mode. + +This post will talk about `Shared` and `No Isolation Shared` access modes. + +!!! note + + All the below examples were tested on a cluster with Databricks runtime v10.4 LTS (Scala 2.12 Spark 3.2.1). + +## `Shared` access mode + +From two different users, running the same command `python -m site`, I got two different results. 
+ +* in a notebook from `user1`, the mapped user is `spark-6166cfd7-9154-4017-b0ff-89`: + +```python +%%sh +whoami +echo ====== +which python +echo ====== +python -m site + +# outputs: +spark-6166cfd7-9154-4017-b0ff-89 +====== +/databricks/python3/bin/python +====== +sys.path = [ + '/home/spark-6166cfd7-9154-4017-b0ff-89', + '/databricks/spark/python', + '/databricks/spark/python/lib/py4j-0.10.9.1-src.zip', + '/databricks/jars/spark--driver--driver-spark_3.2_2.12_deploy.jar', + '/WSFS_NOTEBOOK_DIR', + '/databricks/python_shell', + '/usr/lib/python38.zip', + '/usr/lib/python3.8', + '/usr/lib/python3.8/lib-dynload', + '/databricks/python3/lib/python3.8/site-packages', + '/usr/local/lib/python3.8/dist-packages', + '/usr/lib/python3/dist-packages', +] +USER_BASE: '/home/spark-6166cfd7-9154-4017-b0ff-89/.local' (exists) +USER_SITE: '/home/spark-6166cfd7-9154-4017-b0ff-89/.local/lib/python3.8/site-packages' (doesn't exist) +ENABLE_USER_SITE: True +``` + +* in a notebook from `user2`, the mapped user is `spark-5a9eefa7-49d3-4176-9805-1e`: + +```bash +%%sh +whoami +echo ====== +which python +echo ====== +python -m site + +# outputs: +spark-5a9eefa7-49d3-4176-9805-1e +====== +/databricks/python3/bin/python +====== +sys.path = [ + '/home/spark-5a9eefa7-49d3-4176-9805-1e', + '/databricks/spark/python', + '/databricks/spark/python/lib/py4j-0.10.9.1-src.zip', + '/databricks/jars/spark--driver--driver-spark_3.2_2.12_deploy.jar', + '/WSFS_NOTEBOOK_DIR', + '/databricks/python_shell', + '/usr/lib/python38.zip', + '/usr/lib/python3.8', + '/usr/lib/python3.8/lib-dynload', + '/databricks/python3/lib/python3.8/site-packages', + '/usr/local/lib/python3.8/dist-packages', + '/usr/lib/python3/dist-packages', +] +USER_BASE: '/home/spark-5a9eefa7-49d3-4176-9805-1e/.local' (exists) +USER_SITE: '/home/spark-5a9eefa7-49d3-4176-9805-1e/.local/lib/python3.8/site-packages' (doesn't exist) +ENABLE_USER_SITE: True +``` + +* Pip install a third party Python module will fail + +Below example 
demonstrates the phrase "*Init scripts, third-party libraries, and JARS are not supported*" in the above table. + +```bash +%%sh +pip install requests==2.26.0 +# same error message for: `python -m pip install requests==2.26.0 --user`, +# except for there's no the first phrase: "Defaulting to user installation because normal site-packages is not writeable" + +Defaulting to user installation because normal site-packages is not writeable +Looking in indexes: https://[REDACTED]:****@[REDACTED]/_packaging/[REDACTED]/pypi/simple/ +Collecting requests==2.26.0 + Downloading https://[REDACTED]/_packaging/daa86ee5-06b8-417b-bc88-e64e3e2eef29/pypi/download/requests/2.26/requests-2.26.0-py2.py3-none-any.whl (62 kB) +Requirement already satisfied: certifi>=2017.4.17 in /databricks/python3/lib/python3.8/site-packages (from requests==2.26.0) (2020.12.5) +Requirement already satisfied: urllib3<1.27,>=1.21.1 in /databricks/python3/lib/python3.8/site-packages (from requests==2.26.0) (1.25.11) +Requirement already satisfied: idna<4,>=2.5 in /databricks/python3/lib/python3.8/site-packages (from requests==2.26.0) (2.10) +Collecting charset-normalizer~=2.0.0 + Downloading https://[REDACTED]/_packaging/daa86ee5-06b8-417b-bc88-e64e3e2eef29/pypi/download/charset-normalizer/2.0.12/charset_normalizer-2.0.12-py3-none-any.whl (39 kB) +ERROR: Will not install to the user site because it will lack sys.path precedence to requests in /databricks/python3/lib/python3.8/site-packages +WARNING: You are using pip version 21.0.1; however, version 22.2.2 is available. +You should consider upgrading via the '/databricks/python3/bin/python -m pip install --upgrade pip' command. +CalledProcessError: Command 'b'pip install requests==2.26.0\n'' returned non-zero exit status 1. +``` + +## `No Isolation Shared` access mode + +!!! warning + + Update 2023-02-01, I retested the `No Isolation Shared` access mode today, it seems that something has been changed at Databricks level. 
+ +Hereunder the new behavior: + +1. The user is still `root`, but the Python binary is not a system one, instead an isolated venv is used, and pip install occurs in the venv too. +2. For the same user, each time we re-attach to the cluster, the venv path is changed. And therefore, previous pip install is discarded. + +```python +%%sh +whoami +echo ====== +which python +echo ====== +python -m site + +# outputs: +root +====== +/local_disk0/.ephemeral_nfs/envs/pythonEnv-76eac499-b8f2-451c-ac6a-88f9a68fcae7/bin/python +====== +sys.path = [ + '/databricks/driver', + '/databricks/spark/python', + '/databricks/spark/python/lib/py4j-0.10.9.5-src.zip', + '/databricks/jars/spark--driver--driver-spark_3.3_2.12_deploy.jar', + '/WSFS_NOTEBOOK_DIR', + '/databricks/jars/spark--maven-trees--ml--11.x--graphframes--org.graphframes--graphframes_2.12--org.graphframes__graphframes_2.12__0.8.2-db1-spark3.2.jar', + '/databricks/python_shell', + '/usr/lib/python39.zip', + '/usr/lib/python3.9', + '/usr/lib/python3.9/lib-dynload', + '/local_disk0/.ephemeral_nfs/envs/pythonEnv-76eac499-b8f2-451c-ac6a-88f9a68fcae7/lib/python3.9/site-packages', + '/local_disk0/.ephemeral_nfs/cluster_libraries/python/lib/python3.9/site-packages', + '/databricks/python/lib/python3.9/site-packages', + '/usr/local/lib/python3.9/dist-packages', + '/usr/lib/python3/dist-packages', + '/databricks/.python_edge_libs', +] +USER_BASE: '/root/.local' (exists) +USER_SITE: '/root/.local/lib/python3.9/site-packages' (doesn't exist) +ENABLE_USER_SITE: False +``` + +Below is the test result on 2022-09-20: + +In contrast to `Shared` mode, within the `No Isolation Shared` mode, running the same commands, I got the same results from two different users. +We can find that all the users are logged as `root` account. 
+ +```python +%%sh +whoami +echo ====== +which python +echo ====== +python -m site + +# outputs: +root +====== +/databricks/python3/bin/python +====== +sys.path = [ + '/databricks/driver', + '/databricks/spark/python', + '/databricks/spark/python/lib/py4j-0.10.9-src.zip', + '/databricks/jars/spark--driver--driver-spark_3.1_2.12_deploy.jar', + '/WSFS_NOTEBOOK_DIR', + '/databricks/python_shell', + '/usr/lib/python38.zip', + '/usr/lib/python3.8', + '/usr/lib/python3.8/lib-dynload', + '/databricks/python3/lib/python3.8/site-packages', + '/usr/local/lib/python3.8/dist-packages', + '/usr/lib/python3/dist-packages', +] +USER_BASE: '/root/.local' (exists) +USER_SITE: '/root/.local/lib/python3.8/site-packages' (doesn't exist) +ENABLE_USER_SITE: True +``` + +* Pip install a third party Python module will succeed + +## Conclusion + +* `Shared` access mode maps different users to different user space, their environments are isolated, but they cannot install any additional packages or modules. +* `No Isolation Shared` access mode maps all the users to the root account, everything is shared, they can install anything, but the changes imply to all users. **After cluster restart, all the additional installations are purged**. So maybe one project per cluster is a choice. +* Another good choice is to use the `non-interactive job cluster` with a cluster pool, where the cluster pool is shared, but any user can install anything (can be limited by cluster policy), and the installation is isolated at job level. Which means even two jobs are created by the same user, the two jobs will use different environments (VMs with Databricks runtime container re-deployed in the cluster pool after each job run). 
diff --git a/docs/posts/2022/2022-11-09-azure-pipeline-delete-blobs-from-blob-storage.md b/docs/posts/2022/2022-11-09-azure-pipeline-delete-blobs-from-blob-storage.md new file mode 100644 index 00000000..44a6f483 --- /dev/null +++ b/docs/posts/2022/2022-11-09-azure-pipeline-delete-blobs-from-blob-storage.md @@ -0,0 +1,97 @@ +--- +authors: +- copdips +categories: +- azure +- cicd +- storage +comments: true +date: + created: 2022-11-09 +description: '' +--- + +# Azure pipeline delete blobs from blob storage + +The example given by this post is **for Azure Pipeline with the latest Ubuntu agent**, for AzCli from local machine, removing the `--auth-mode login` part should work. + +As it's a Linux pipeline agent, the pipeline task [AzureFileCopy](https://learn.microsoft.com/en-us/azure/devops/pipelines/tasks/deploy/azure-file-copy?view=azure-devops) can not be used, it's written in Powershell, we should use the [AzureCLI](https://learn.microsoft.com/en-us/azure/devops/pipelines/tasks/deploy/azure-cli?view=azure-devops) task instead. + +## Working example + +Suppose we have following use case: + +| type | value | +| ---------------------------- | ----------------------------------- | +| storage account name | sto | +| container name | con | +| blob 1 path in blob storage | folder/sub_folder/blob1 | +| blob 2 path in blob storage | folder/sub_folder/blob2 | +| blob 1 path in local machine | local_folder/local_sub_folder/blob1 | +| blob 2 path in local machine | local_folder/local_sub_folder/blob2 | + +The virtual folder `folder/sub_folder/` has only 2 blobs as shown in the above table. 
Hereunder is the Azure Pipeline code to delete existing files from `folder/sub_folder/` in the Azure blob storage and then upload
But if you use `--account-key` for auth, it's currently not available as `az storage account keys list --account-name sto` with current version (v2.41.0) of azure-cli delivered by Azure Pipeline agent has a bug like this: *AttributeError: module 'azure.mgmt.storage.v2022_05_01.models' has no attribute 'ActiveDirectoryPropertiesAccountType'* or this: *AttributeError: module 'azure.mgmt.storage.v2022_05_01.models' has no attribute 'ListKeyExpand'*. So we should use other auth methods like SAS token or connection string pre-populated in KeyVault. + +Downgrading the azure-cli version inside [AzureCLI](https://learn.microsoft.com/en-us/azure/devops/pipelines/tasks/deploy/azure-cli?view=azure-devops) during Azure pipeline might work, but not tested. + +!!! note + + `az storage azcopy blob delete --account-key` works from local machine if it's not the buggy version installed. + +## Failed with `az storage blob delete-batch` + +`az storage blob delete-batch -s con --account-name sto --delete-snapshots include --dryrun --pattern "folder/subfolder/*` could work only in case there're not many blobs inside the container `con`, otherwise this command using `--pattern` (with [Python fnmatch](https://docs.python.org/3.7/library/fnmatch.html) behind the scenes) will pending for a long time. 
diff --git a/docs/posts/2022/2022-11-13-azure-pipeline-windows-agent-UnicodeEncodeError.md b/docs/posts/2022/2022-11-13-azure-pipeline-windows-agent-UnicodeEncodeError.md new file mode 100644 index 00000000..78e82321 --- /dev/null +++ b/docs/posts/2022/2022-11-13-azure-pipeline-windows-agent-UnicodeEncodeError.md @@ -0,0 +1,23 @@ +--- +authors: +- copdips +categories: +- azure +- cicd +- encoding +comments: true +date: + created: 2022-11-13 +description: '' +--- + +# Azure pipeline Windows agent UnicodeEncodeError + +For people who encounter `UnicodeEncodeError` when using Windows Azure Pipeline agent, the issue might be [here](https://github.com/PrefectHQ/prefect/issues/5754#issuecomment-1312774275). + +As per above link, or this [email](https://mail.python.org/pipermail/python-list/2022-November/908164.html), the solutions could be: + +* You can override just sys.std* to UTF-8 by setting +the environment variable `PYTHONIOENCODING=UTF-8`. +* You can override all I/O to use UTF-8 by setting `PYTHONUTF8=1`, or by passing the +command-line option `-X utf8`. diff --git a/docs/posts/2022/2022-11-15-using-ast-and-cst-to-change-python-code.md b/docs/posts/2022/2022-11-15-using-ast-and-cst-to-change-python-code.md new file mode 100644 index 00000000..7f6eaad7 --- /dev/null +++ b/docs/posts/2022/2022-11-15-using-ast-and-cst-to-change-python-code.md @@ -0,0 +1,102 @@ +--- +authors: +- copdips +categories: +- python +- ast +comments: true +date: + created: 2022-11-15 +description: '' +--- + +# Using ast and cst to change Python code + +## Difference between AST and CST + +A brief comparison could be found in the [libcst doc](https://libcst.readthedocs.io/en/latest/why_libcst.html). Generally speaking, CST could keep the original source code format including the comments. 
+ +## Using AST to change Python code + +Since **Python 3.9**, the helper [ast.unparse](https://docs.python.org/3.9/library/ast.html#ast.unparse) has been introduced, so we have both `ast.parse` and `ast.unparse` in our hands, everything is ready, finally we have an official way to change Python code. + +For example, I have a the file `setup.py` as belows: + +```py +"""setup.py file +""" +from pkg_resources import parse_requirements +from setuptools import setup + +with open("requirements.txt", encoding="utf-8") as f: + install_requires = [str(req) for req in parse_requirements(f)] + +setup( + name="foo", + install_requires=install_requires, +) +``` + +I want to change the line `install_requires=install_requires,` by `install_requires=["a==1", "b==2"],`. + +Since Python3.9, I can achieve it like this: + +```python +import ast +import json + +new_install_requires = ["a==1", "b==2"] + +setup_file = open("setup.py").read() +setup = ast.parse(setup_file) + +print("\n***Before change\n") +print(ast.unparse(setup)) + +for body in setup.body: + try: + if hasattr(body, "value") and hasattr(body.value, "keywords"): + for kw in body.value.keywords: + if kw.arg == "install_requires": + kw.value = ast.parse(json.dumps(new_install_requires)).body[0] + except Exception as err: + print(err) + +print("\n***After change\n") +print(ast.unparse(setup)) +``` + +Result from the console: + +```bash +$ python3.9 change_setup.py + +***Before change + +"""setup.py file +""" +from pkg_resources import parse_requirements +from setuptools import setup +with open('requirements.txt', encoding='utf-8') as f: + install_requires = [str(req) for req in parse_requirements(f)] +setup(name='foo', install_requires=install_requires) + +***After change + +"""setup.py file +""" +from pkg_resources import parse_requirements +from setuptools import setup +with open('requirements.txt', encoding='utf-8') as f: + install_requires = [str(req) for req in parse_requirements(f)] +setup(name='foo', 
install_requires= +['a==1', 'b==2']) +``` + +!!! note + + You will notice that, the `ast.parse` discards all the comments. And if need to format the code, black could be a good choice. + +## Using CST to change Python code + +An example can be found the repo [hauntsaninja/no_implicit_optional](https://github.com/hauntsaninja/no_implicit_optional) that uses the [libcst](https://github.com/Instagram/LibCST) from Instagram diff --git a/docs/posts/2022/2022-12-01-python-difference-on-subprocess-run-call-check-call-check-output.md b/docs/posts/2022/2022-12-01-python-difference-on-subprocess-run-call-check-call-check-output.md new file mode 100644 index 00000000..d28b5c6e --- /dev/null +++ b/docs/posts/2022/2022-12-01-python-difference-on-subprocess-run-call-check-call-check-output.md @@ -0,0 +1,102 @@ +--- +authors: +- copdips +categories: +- python +comments: true +date: + created: 2022-12-01 + updated: 2022-12-02 +description: '' +--- + +# Python difference on subprocess run(), call(), check_call(), check_output() + +## Difference on subprocess run(), call(), check_call(), check_output() + +Since Python 3.5, the [official doc](https://docs.python.org/3.5/library/subprocess.html#older-high-level-api) explains that: + +Prior to Python 3.5, these three functions (`subprocess.call()`, `subprocess.check_call()`, `subprocess.check_output()`) comprised the high level API to subprocess. You can now use `subprocess.run()` in many cases, but lots of existing code calls these functions. 
+ + + +## subprocess.run common parameters + +* subprocess.run default behavior accepts arguments in list + + ```python + subprocess.run(["ls", "-l"]) + ``` + +* `shell=True` (default `False`) to send arguments in string + + ```python + subprocess.run("ls -l", shell=True) + ``` + +* `capture_output=True` (default `False`) to save output in a var + + ```python + res = subprocess.run("ls -l", shell=True, capture_output=True) + res.stdout + ``` + +* `encoding="utf-8"` (default `None`) to save var in string instead of bytes. + +* `check=True` (default `False`) to raise [`subprocess.CalledProcessError`](https://docs.python.org/3/library/subprocess.html#subprocess.CalledProcessError): if command returned non-zero exit code. But if the command executable doesn't exist for exampel missspellm you will get the error `FileNotFoundError` + +* [Popen()](https://docs.python.org/3/library/subprocess.html#using-the-subprocess-module) is for advanced usage. For example, [replacing the shell pipeline](https://docs.python.org/3/library/subprocess.html#replacing-shell-pipeline). + + shell command: + + ```shell + output=$(dmesg | grep hda) + ``` + + with Popen, becomes: + + ```python + p1 = Popen(["dmesg"], stdout=PIPE) + p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) + p1.stdout.close() # Allow p1 to receive a SIGPIPE if p2 exits. + output = p2.communicate()[0] + ``` + +* default params + + ```python + import subprocess + + default_run_params = dict( + capture_output=True, + encoding="utf-8", + check=True + ) + # command = ["unknown_command", "-l"] + # command = ["python", "-askjd"] + command = ["ls", "-l"] + + try: + # output type is subprocess.CompletedProcess + output = subprocess.run(command, **default_run_params) + + # print in pure string in one line + print(output) + + # print with new line just as launching from shell + print(output.stdout) + + # as we catch error with `check=True`, + # output.stderr is always an empty string. 
+ # and output.returncode is always 0 in this case. + except FileNotFoundError as exc: + print(f"{type(exc).__name__}: {exc}") + raise + except subprocess.CalledProcessError as exc: + print(exc) # no error details will given by print(exc) + print(exc.__dict__) # print all + print(exc.returncode) + print(exc.stderr) # print error message only + # exc.stdout should be empty + raise + ``` diff --git a/docs/posts/2022/2022-12-03-syncing-repository-from-github-to-gitee.md b/docs/posts/2022/2022-12-03-syncing-repository-from-github-to-gitee.md new file mode 100644 index 00000000..86c06a7f --- /dev/null +++ b/docs/posts/2022/2022-12-03-syncing-repository-from-github-to-gitee.md @@ -0,0 +1,28 @@ +--- +authors: +- copdips +categories: +- git +comments: true +date: + created: 2022-12-03 +description: '' +--- + +# Syncing repository from github to gitee + +I need to sync github repository (files and commits only) https://github.com/copdips/copdips.github.io to gitee repository https://gitee.com/copdips/copdips.github.io. + +1. In gitee: create an empty repository, normal the same name as the one you want to sync from github. For example for this blog repository: https://gitee.com/copdips/copdips.github.io +2. In gitee: create a PAT in gitee with necessary permissions (`all` or `projects`). + The sync needs to run two commands against to gitee: + - `git push --all --force gitee` + - `git push --tags --force gitee` +3. In github repository: create 2 secrets: `GITEE_USERNAME=copdips`, and `GITEE_PAT={PAT_created_in_the_previous_step}` +4. In github repository: create a github workflow, such as: [.github/workflows/sync-to-gitee.yml](https://github.com/copdips/copdips.github.io/blob/main/.github/workflows/sync-to-gitee.yml) +5. In github repository: push the above github workflow file to github, it will automatically trigger the first sync. And from now on, all the pushes to the `main` branch will trigger a such sync too. 
`main` is my default branch to trigger the sync, could be changed in the workflow file. + +!!! note + + Github action within github free personal plan has a time limit at [2000 minutes per month](https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions), which should be enough if you don't have many repositories and many pushes. + diff --git a/docs/posts/2022/2022-12-30-azure-pipeline-expressions.md b/docs/posts/2022/2022-12-30-azure-pipeline-expressions.md new file mode 100644 index 00000000..247a6b77 --- /dev/null +++ b/docs/posts/2022/2022-12-30-azure-pipeline-expressions.md @@ -0,0 +1,30 @@ +--- +authors: +- copdips +categories: +- azure +- cicd +comments: true +date: + created: 2022-12-30 +description: '' +draft: true +--- + +# Azure pipeline expressions + +```yml +# https://docs.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops +# The difference between runtime and compile time expression syntaxes is primarily what context is available. In a compile-time expression (${{ }}), you have access to parameters and statically defined variables. In a runtime expression ($[ ]), you have access to more variables but no parameters. 
+ +variables: + staticVar: 'my value' # static variable + compileVar: ${{ variables.staticVar }} # compile time expression + isMain: $[eq(variables['Build.SourceBranch'], 'refs/heads/main')] # runtime expression + +steps: + - script: | + echo ${{variables.staticVar}} # outputs my value + echo $(compileVar) # outputs my value + echo $(isMain) # outputs True +``` diff --git a/docs/posts/2023/2023-01-04-python-aiohttp-rate-limit.md b/docs/posts/2023/2023-01-04-python-aiohttp-rate-limit.md new file mode 100644 index 00000000..0773b1e5 --- /dev/null +++ b/docs/posts/2023/2023-01-04-python-aiohttp-rate-limit.md @@ -0,0 +1,133 @@ +--- +authors: +- copdips +categories: +- python +- async +comments: true +date: + created: 2023-01-04 + updated: 2023-01-06 +description: '' +--- + +# Python aiohttp rate limit +HTTP rate limit is often the max requests in a limited time period, and sometimes could also be the max concurrent requests. + +## Max requests in a limited time period + +```python +from aiolimiter import AsyncLimiter + +RATE_LIMIT_IN_SECOND = 20 +# 1.0 for time period during 1 second +rate_limit = AsyncLimiter(RATE_LIMIT_IN_SECOND, 1.0) + +async with rate_limit: + await my_aiohttp_request() +``` + +## Max concurrent requests + +Official doc: [Limiting connection pool size](https://docs.aiohttp.org/en/stable/client_advanced.html#limiting-connection-pool-size) + +```python +import aiohttp + +MAX_CONCURRENT = 10 + +async def main(): + # The default limit is 100 + connector = aiohttp.TCPConnector(limit=MAX_CONCURRENT) + + async with aiohttp.ClientSession(connector=connector) as session: + await my_aiohttp_request() + +if __name__ == "__main__": + asyncio.run(main()) +``` + +!!! warning + + The object `connector` from `connector = aiohttp.TCPConnector(limit=MAX_CONCURRENT)` must be created within an async function. + +## Example + +We can borrow the official example on [asyncio queues](https://docs.python.org/fr/3/library/asyncio-queue.html#examples). 
+ +The below example shows how to send GET method to [https://httpbin.org/get](https://httpbin.org/get) with a rate limit of 20 requests per second and max 10 concurrent requests. + +```python +import asyncio +import random +import time + +import aiohttp +from aiolimiter import AsyncLimiter + +MAX_CONCURRENT = 10 +RATE_LIMIT_IN_SECOND = 20 +rate_limit = AsyncLimiter(RATE_LIMIT_IN_SECOND, 1.0) + + +async def my_aiohttp_request(session, name): + response = await session.get("https://httpbin.org/get") + response.raise_for_status() + json_response = await response.json() + print(f"{name} finished aiohttp request with response: {json_response}") + # do something on reponse here + + +async def worker(name, queue, session): + while True: + # Get a "work item" out of the queue. + sleep_for = await queue.get() + + # Sleep for the "sleep_for" seconds. + await asyncio.sleep(sleep_for) + + async with rate_limit: + await my_aiohttp_request(session, name) + + # Notify the queue that the "work item" has been processed. + queue.task_done() + + print(f"{name} has slept for {sleep_for:.2f} seconds") + + +async def main(): + connector = aiohttp.TCPConnector(limit=MAX_CONCURRENT) + async with aiohttp.ClientSession(connector=connector) as session: + # Create a queue that we will use to store our "workload". + queue = asyncio.Queue() + + # Generate random timings and put them into the queue. + total_sleep_time = 0 + for _ in range(20): + sleep_for = random.uniform(0.05, 1.0) + total_sleep_time += sleep_for + queue.put_nowait(sleep_for) + + # Create three worker tasks to process the queue concurrently. + tasks = [ + asyncio.create_task(worker(f"worker-{idx}", queue, session)) + for idx in range(MAX_CONCURRENT) + ] + # Wait until the queue is fully processed. + started_at = time.monotonic() + await queue.join() + total_slept_for = time.monotonic() - started_at + + # Cancel our worker tasks. + for task in tasks: + task.cancel() + # Wait until all worker tasks are cancelled. 
+ await asyncio.gather(*tasks, return_exceptions=True) + + print("====") + print(f"3 workers slept in parallel for {total_slept_for:.2f} seconds") + print(f"total expected sleep time: {total_sleep_time:.2f} seconds") + + +asyncio.run(main()) +``` diff --git a/docs/posts/2023/2023-01-05-calling-azure-rest-api.md b/docs/posts/2023/2023-01-05-calling-azure-rest-api.md new file mode 100644 index 00000000..9d97645c --- /dev/null +++ b/docs/posts/2023/2023-01-05-calling-azure-rest-api.md @@ -0,0 +1,104 @@ +--- +authors: +- copdips +categories: +- azure +- api +comments: true +date: + created: 2023-01-05 + updated: 2023-05-23 +description: '' +--- + +# Calling Azure REST API + +This blog [Calling Azure REST API via curl](https://mauridb.medium.com/calling-azure-rest-api-via-curl-eb10a06127) is pretty good. Just two more things. + +## Auth token in curl + +We can use `curl -X GET -u :$token` instead of `curl -X GET -H "Authorization: Bearer $token"` + +## Azure DevOps API resource id for OAuth + +when using `az rest` to call [Azure DevOps API](https://learn.microsoft.com/en-us/rest/api/azure/devops/), you will get a similar error as follows: + +Can't derive appropriate Azure AD resource from --url to acquire an access token. If access token is required, use --resource to specify the resource. + + + +This is because Azure DevOps API base url: [https://dev.azure.com/](https://dev.azure.com/) or [https://vssps.dev.azure.com/](https://vssps.dev.azure.com/), etc. are not an Azure cloud endpoint. + +```bash +$ az rest --help +Command + az rest : Invoke a custom request. + This command automatically authenticates using the logged-in credential: If Authorization + header is not set, it attaches header `Authorization: Bearer `, where `` is + retrieved from AAD. The target resource of the token is derived from --url if --url starts + with an endpoint from `az cloud show --query endpoints`. You may also use --resource for a + custom resource. 
+ If Content-Type header is not set and --body is a valid JSON string, Content-Type header + will default to application/json. + Arguments + [...redacted] + --resource : Resource url for which CLI should acquire a token from AAD + in order to access the service. The token will be placed in + the Authorization header. By default, CLI can figure this + out based on --url argument, unless you use ones not in the + list of "az cloud show --query endpoints". + [...redacted] +``` + +```bash +$ az cloud show --query endpoints +{ + "activeDirectory": "https://login.microsoftonline.com", + "activeDirectoryDataLakeResourceId": "https://datalake.azure.net/", + "activeDirectoryGraphResourceId": "https://graph.windows.net/", + "activeDirectoryResourceId": "https://management.core.windows.net/", + "appInsightsResourceId": "https://api.applicationinsights.io", + "appInsightsTelemetryChannelResourceId": "https://dc.applicationinsights.azure.com/v2/track", + "attestationResourceId": "https://attest.azure.net", + "azmirrorStorageAccountResourceId": null, + "batchResourceId": "https://batch.core.windows.net/", + "gallery": "https://gallery.azure.com/", + "logAnalyticsResourceId": "https://api.loganalytics.io", + "management": "https://management.core.windows.net/", + "mediaResourceId": "https://rest.media.azure.net", + "microsoftGraphResourceId": "https://graph.microsoft.com/", + "ossrdbmsResourceId": "https://ossrdbms-aad.database.windows.net", + "portal": "https://portal.azure.com", + "resourceManager": "https://management.azure.com/", + "sqlManagement": "https://management.core.windows.net:8443/", + "synapseAnalyticsResourceId": "https://dev.azuresynapse.net", + "vmImageAliasDoc": "https://raw.githubusercontent.com/Azure/azure-rest-api-specs/master/arm-compute/quickstart-templates/aliases.json" +} +``` + +So we need to find the resource url for Azure DevOps API. 
Hopefully, we can find it from this [github issue](https://github.com/Azure/azure-cli/issues/7618#issuecomment-909822540), or from the official [Azure DevOps doc](https://learn.microsoft.com/en-us/azure/devops/organizations/accounts/manage-personal-access-tokens-via-api?view=azure-devops#configure-a-quickstart-application), we can use `499b84ac-1321-427f-aa17-267ca6975798` as the value of `--resource` to call `az rest`: + +```bash +az rest \ + --resource 499b84ac-1321-427f-aa17-267ca6975798 \ + --url +``` + +When running `az rest` within Azure pipeline, we also need to add the authorization, as the SPN injected by `azureSubscription` [cannot be recognized by Azure DevOps API](https://learn.microsoft.com/en-us/azure/devops/release-notes/roadmap/support-azure-managed-identities), it's not a user account. The SPN support is in Azure DevOps road map, and planned to be released in 2023 Q1. I'll update this post once I've tested it. + +```yaml +- task: AzureCLI@2 + displayName: Az rest + inputs: + azureSubscription: $(azureResourceServiceConnection) + scriptType: bash + scriptLocation: inlineScript + inlineScript: | + az rest \ + --headers "Authorization=Bearer $SYSTEM_ACCESSTOKEN" \ + --resource 499b84ac-1321-427f-aa17-267ca6975798 \ + --url + failOnStandardError: true + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) +``` diff --git a/docs/posts/2023/2023-01-28-sonarcloud-github-action.md b/docs/posts/2023/2023-01-28-sonarcloud-github-action.md new file mode 100644 index 00000000..da065901 --- /dev/null +++ b/docs/posts/2023/2023-01-28-sonarcloud-github-action.md @@ -0,0 +1,108 @@ +--- +authors: +- copdips +categories: +- github +- sonar +- cicd +comments: true +date: + created: 2023-01-28 +description: '' +--- + +# Sonarcloud Github Action + +[Sonarcloud Github Action](https://github.com/SonarSource/sonarcloud-github-action) doesn't work by default with Python pytest `coverage.xml` file, hereunder a working example. 
+ +## file `.github/workflows/ci.yml` + +```yaml +# file: .github/workflows/ci.yml + +# irrelevant part is removed +env: + repo_name: repo + app_folder_name: app + coverage_percent: 90 + build_number: ${{ github.run_number }} + pytest_coverage_commentator_filename: pytest_coverage_commentator.txt + pytest_coverage_xml_file_name: coverage.xml + +- name: Test with pytest + run: | + pytest -v -s \ + --cov=$app_folder_name \ + --cov-fail-under=$coverage_percent \ + --cov-report=xml:$pytest_coverage_xml_file_name \ + --cov-report=term-missing:skip-covered + +# Codecov is a nice tool so given here too +- name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + env_vars: OS,PYTHON + fail_ci_if_error: true + flags: unittests + name: codecov-repo_name + files: coverage.xml + verbose: true + +- name: Test pytest with pytest-coverage-commentator + run: | + pytest --cache-clear --cov=$app_folder_name > $pytest_coverage_commentator_filename + +- name: Comment PR with coverage + uses: coroo/pytest-coverage-commentator@v1.0.2 + with: + pytest-coverage: ${{ env.pytest_coverage_commentator_filename }} + +- name: Override Coverage Source Path for Sonar + # https://community.sonarsource.com/t/code-coverage-doesnt-work-with-github-action/16747/7 + # we should convert '/home/runner/work/pr/repo/app' to '/github/workspace//app' + # be careful DOUBLE slashes in the later part, and the app in the later part is retrieved from sonar.sources from sonar-project.properties + run: | + echo "GITHUB_WORKSPACE=$GITHUB_WORKSPACE" + echo 'coverage.xml before:' + head $GITHUB_WORKSPACE/$pytest_coverage_xml_file_name + sed -i 's@'$GITHUB_WORKSPACE'@/github/workspace/@g' $GITHUB_WORKSPACE/$pytest_coverage_xml_file_name + echo 'coverage.xml after:' + head $GITHUB_WORKSPACE/$pytest_coverage_xml_file_name + +- name: SonarCloud Scan + uses: sonarsource/sonarcloud-github-action@master + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SONAR_TOKEN: 
${{ secrets.SONAR_TOKEN }} +``` + +## file `sonar-project.properties` + +Hereunder an example of the file `sonar-project.properties` + +```ini +# https://github.com/pbrod/numdifftools/blob/master/sonar-project.properties +# https://github.com/pbrod/numdifftools/blob/master/sonar-project_readme.txt +# https://github.com/SonarSource/sonarcloud-github-action + +sonar.organization=copdips +sonar.projectKey=copdips_reponame + +# relative paths to source directories. More details and properties are described +# in https://sonarcloud.io/documentation/project-administration/narrowing-the-focus/ +sonar.sources=folder_name + +# sonar.exclusions must specify till the file extension, +# whether *.py for python or * for any files. `folder_name/notebooks/` doesn't work. +sonar.exclusions=folder_name/notebooks/*.py + +sonar.projectVersion=${env.build_number} +# sonar.python.pylint_config=.pylintrc +sonar.python.version=3.8, 3.9, 3.10 + +# https://docs.sonarqube.org/latest/analysis/coverage/ +# https://docs.sonarqube.org/latest/analysis/analysis-parameters/ +sonar.tests=tests +sonar.python.coverage.reportPaths=${env.pytest_coverage_xml_file_name} +``` diff --git a/docs/posts/2023/2023-05-26-searching-azcli-packages-installation-path.md b/docs/posts/2023/2023-05-26-searching-azcli-packages-installation-path.md new file mode 100644 index 00000000..1accd2f2 --- /dev/null +++ b/docs/posts/2023/2023-05-26-searching-azcli-packages-installation-path.md @@ -0,0 +1,23 @@ +--- +authors: +- copdips +categories: +- python +- linux +- azure +comments: true +date: + created: 2023-05-26 +description: '' +draft: true +--- + +# Searching azcli packages installation path + +Need to debug some Azure CLI code by add some pdb breakpoints, but don't know where the code is installed ? Here is how to find it. 
+ +```bash +$ find / -type d \( -name '*venv' -o -name '*git' \) -prune -o -type d -name 'apimanagement' -pri +nt 2>/dev/null +/opt/az/lib/python3.10/site-packages/azure/mgmt/apimanagement +``` diff --git a/docs/posts/2023/2023-07-04-python-asyncio-unittest.md b/docs/posts/2023/2023-07-04-python-asyncio-unittest.md new file mode 100644 index 00000000..0b59cb86 --- /dev/null +++ b/docs/posts/2023/2023-07-04-python-asyncio-unittest.md @@ -0,0 +1,165 @@ +--- +authors: +- copdips +categories: +- python +- async +- unittest +comments: true +date: + created: 2023-07-04 +description: Unittest based on Pytest framework not on embedded unittest. +--- + +# Python Asyncio Unittest + +Unittest based on Pytest framework not embedded unittest. + + + +## Mocking async http client aiohttp.ClientSession + +### Source code + +```python +# file path: root/module_name/foo.py +# pip install aiohttp +import aiohttp + + +class ClassFoo: + def __init__(self, access_token: str): + self.access_token = access_token + self.auth_header = {"Authorization": f"Bearer {self.access_token}"} + self.base_url = "https://foo.bar.com/api/v1" + + async def get_foo(self, foo_id: str) -> dict: + url = f"{self.base_url}/{foo_id}" + async with aiohttp.ClientSession(headers=self.auth_header) as session: + async with session.get(url) as resp: + resp.raise_for_status() + return await resp.json() +``` + +### Unittest with pytest-asyncio + +```python +# file path: root/tests/module_name/test_foo.py +# pip install pytest pytest-asyncio + +from typing import Any +import pytest +from unittest.mock import MagicMock, patch, AsyncMock +from module_name import foo as test_module + +TEST_MODULE_PATH = test_module.__name__ + + +@pytest.fixture +def mock_session(): + with patch(f"{TEST_MODULE_PATH}.aiohttp.ClientSession") as mock_client_session: + session = MagicMock() + mock_client_session.return_value.__aenter__.return_value = session + yield session + + +@pytest.fixture +def mock_service(): + access_token = "bar" + 
 yield test_module.ClassFoo(access_token=access_token) + + +@pytest.mark.asyncio # could be removed if asyncio_mode = "auto" +async def test_get_foo(mock_session, mock_service): + foo_id = "foo" + mock_json_response = {"key": "value"} + + mock_response = AsyncMock() + mock_response.json.return_value = mock_json_response + mock_response.raise_for_status.return_value = None + + mock_session.get.return_value.__aenter__.return_value = mock_response + + response = await mock_service.get_foo(foo_id=foo_id) + + mock_session.get.assert_called_once_with(f"{mock_service.base_url}/{foo_id}") + assert response == mock_json_response +``` + +!!! note + + If you set [`asyncio_mode = "auto"`](https://pytest-asyncio.readthedocs.io/en/latest/reference/configuration.html) (defaults to `strict`) in your config (pyproject.toml, setup.cfg or pytest.ini) there is no need for the `@pytest.mark.asyncio` marker. + +The above unittest will succeed but also raise a warning: + +```bash +============================= warnings summary ============================== +tests/module_name/test_foo.py::test_get_foo + root/module_name/test_foo.py:15: RuntimeWarning: coroutine 'AsyncMockMixin._execute_mock_call' was never awaited + resp.raise_for_status() + Enable tracemalloc to get traceback where the object was allocated. + See https://docs.pytest.org/en/stable/how-to/capture-warnings.html#resource-warnings for more info. +``` + +This is because `resp` is an `AsyncMock` object, so `resp.raise_for_status()` will be an `AsyncMockMixin` object. But in fact, `raise_for_status()` is a traditional sync function, it will not be awaited.
So we need to mock it with a `MagicMock` object: + +```py +In [1]: from unittest.mock import AsyncMock, MagicMock + +In [2]: a = AsyncMock() + +In [3]: a +Out[3]: + +In [4]: a.raise_for_status() +Out[4]: + +In [5]: a.raise_for_status = MagicMock() + +In [6]: a.raise_for_status() +Out[6]: +``` + +To fix the warning, we need to change the line: + +```python +# replace line: +mock_response.raise_for_status.return_value = None + +# by: +mock_response.raise_for_status = MagicMock() +``` + +## Pytest fixture with session scope + +Say I need a session scope fixture to perform a cleanup before all tests and after all tests: + +```python +@pytest.fixture(scope="session", autouse=True) +async def _clean_up(): + await pre_tests_function() + yield + await post_tests_function() +``` + +This session scope fixture will be called automatically before all tests and after all tests. But when you run the tests, you will get an error: + +ScopeMismatch: You tried to access the 'function' scoped fixture 'event_loop' with a 'session' scoped request object, involved factories + + + +This is because pytest-asyncio creates by default a new [function scope event loop](https://pytest-asyncio.readthedocs.io/en/latest/concepts.html#asyncio-event-loops), but the async fixture `_clean_up` is session scoped and is using the event loop fixture, hence the ScopeMismatch in the error message.
To fix this, we need to create a new session scope event loop for the fixture `_clean_up`: + +```python +@pytest.fixture(scope="session") +def event_loop(): + loop = asyncio.get_event_loop() + yield loop + loop.close() + +@pytest.fixture(scope="session", autouse=True) +async def _clean_up(): + await pre_tests_function() + yield + await post_tests_function() +``` diff --git a/docs/posts/2023/2023-09-04-different-ssh-keys-for-different-github.com-accounts.md b/docs/posts/2023/2023-09-04-different-ssh-keys-for-different-github.com-accounts.md new file mode 100644 index 00000000..6db1eebc --- /dev/null +++ b/docs/posts/2023/2023-09-04-different-ssh-keys-for-different-github.com-accounts.md @@ -0,0 +1,40 @@ +--- +authors: +- copdips +categories: +- git +comments: true +date: + created: 2023-09-04 +description: '' +--- + +# Different ssh keys for different github.com accounts + +It might be a common case that you have multiple github.com accounts (personal and professional), and you want to use different ssh keys for different github accounts, as github.com does not allow same ssh key for different accounts with *"Key is already in use"* error. + +To achieve this, you could follow this [tutorial](https://vanthanhtran245.github.io/use-multiple-ssh-key-for-different-git-accounts/): + +1. Generate ssh keys for each github.com account. For e.g. `~/.ssh/id_rsa` and `~/.ssh/id_rsa_pro`. +2. Create a `~/.ssh/config` file to specify which ssh key to use for which github account. + + ```bash + Host github.com + HostName github.com + IdentityFile ~/.ssh/id_rsa + User copdips + + # The HostName is still github.com, but the host here is github.com-pro, this is the key point. + # You can change it to whatever you want + Host github.com-pro + HostName github.com + IdentityFile ~/.ssh/id_rsa_pro + User copdips-pro + ``` + +3. Git clone the repositories by replacing `github.com` in the git clone ssh url with the ssh alias defined in `~/.ssh/config`. 
+ Say the pro ssh clone url is: **git@`github.com`:my-company/repo.git**, then you need to rewrite it to **git@`github.com-pro`:my-company/repo.git** to be able to use the ssh key `~/.ssh/id_rsa_pro` defined in `~/.ssh/config`. + +!!! note + + In Chrome (as well as in Edge), there's an extension called [MultiLogin](https://chrome.google.com/webstore/detail/multilogin/ijfgglilaeakmoilplpcjcgjaoleopfi) that allows you to use multiple accounts (e.g. personal and professional github.com accounts) across different tabs in the same browser instance. So you do not need to keep two browser instances opened at the same time. In Firefox, you even have a better extension called [Firefox Multi-Account Containers](https://addons.mozilla.org/en-US/firefox/addon/multi-account-containers/). diff --git a/docs/posts/2023/2023-09-14-python-asyncio.md b/docs/posts/2023/2023-09-14-python-asyncio.md new file mode 100644 index 00000000..5cb4002d --- /dev/null +++ b/docs/posts/2023/2023-09-14-python-asyncio.md @@ -0,0 +1,388 @@ +--- +authors: +- copdips +categories: +- python +- async +comments: true +date: + created: 2023-09-14 +description: '' +--- + +# Python Asyncio + +!!! note + + This is not a Python asyncio tutorial. Just some personal quick tips here, and could be updated from time to time. + +## greenlet vs gevent + +- greenlet needs manual event switch. +- gevent is based on greenlet. gevent has `gevent.monkey.patch_all()`. + +## @asyncio.coroutine + +From Python 3.8, `async def` deprecates `@asyncio.coroutine` + +## yield from + +From Python 3.5, `await` deprecates `yield from` + +## scope of await + +`await` can only be used in `async def` except in `ipython` + +## asyncio with queue + + + +## aiohttp with rate limit + + + +## get_running_loop vs get_event_loop + +- `get_running_loop` raises an error if there's no running loop. +- `get_event_loop` returns the running loop if it exists, otherwise creates one and returns it.
+ +## Awaitable vs Future vs Task vs Coroutine + +- [`Awaitable`](https://docs.python.org/3/library/asyncio-task.html#awaitables) is an object can be used in an `await` expression. There are three main types of awaitable objects: `coroutines`, `Tasks`, and `Futures`. +- [`Coroutine`](https://docs.python.org/3/library/asyncio-task.html#coroutine) is declared with the `async/await` syntax is the preferred way of writing asyncio applications. Coroutines can await on `Future` objects until they either have a result or an exception set, or until they are cancelled. Python coroutines are awaitables and therefore can be awaited from other coroutines +- [`Future`](https://docs.python.org/3/library/asyncio-future.html#asyncio.Future) is an awaitable object. A Future represents an eventual result of an asynchronous operation. Not thread-safe. +- [`Task`](https://docs.python.org/3/library/asyncio-task.html#asyncio.Task) is subclass of `Future` that runs a Python coroutine. Not thread-safe. Tasks are used to schedule coroutines concurrently. When a coroutine is wrapped into a Task with functions like `asyncio.create_task()` the coroutine is automatically scheduled to run soon + +## ensure_future vs create_task + +- `create_task` is high-level introduced in Python 3.7 and accepts only `coroutines`, returns a Task object which is subclass of Future. `create_task` must be called inside a running event loop. +- `ensure_future` is low-level and accepts both `coroutines` and `Futures`. `Task` is subclass of `Future`. If `ensure_future` gets a `Task`, it will return the input `Task` itself, as Future is ensured. If `ensure_future` gets a `coroutine`, it will call `create_task` to wrap the input `coroutine` to a `Task`, then return it. +- `create_task` must be called inside an event loop, `ensure_future` can create an event loop if not exists. +- `create_task` can name the task. 
+ +create_task [source code](https://github.com/python/cpython/blob/124af17b6e49f0f22fbe646fb57800393235d704/Lib/asyncio/tasks.py#L369-L382), ensure_future [source code](https://github.com/python/cpython/blob/124af17b6e49f0f22fbe646fb57800393235d704/Lib/asyncio/tasks.py#L647-L652). + +[Warning on ensure_future](https://docs.python.org/3/library/asyncio-future.html#asyncio.ensure_future): +!!! warning + + Deprecated since version 3.10: Deprecation warning is emitted if obj is not a Future-like object and loop is not specified and **there is no running event loop**. Coroutine is not a Future-like object. + +## await vs await asyncio.wait_for() vs asyncio.shield() + +Almost the same. but wait_for() can set timeout, and shield() can protect a task from being cancelled. + +```py +await task + +# throw TimeoutError if timeout +await asyncio.wait_for(task, timeout) + +# still throw TimeoutError if timeout, but task.cancelled() +# inside of try/catch asyncio.TimeoutError block will be ignored, a +# nd task continues to run. 
+await asyncio.wait_for(asyncio.shield(task), 1) +``` + +```py +import asyncio + +async def delay(seconds): + print(f"start sleep {seconds}") + await asyncio.sleep(seconds) + print(f"end sleep") + return seconds + +async def main(): + delay_task = asyncio.create_task(delay(2)) + try: + result = await asyncio.wait_for(asyncio.shield(delay_task), 1) + print("return value:", result) + except asyncio.TimeoutError: + # shield() does not protect from timeout, so it throws TimeoutError + print("timeout") + # shield() does protect from being cancelled + print("whether the task is cancelled:", delay_task.cancelled()) + # from where it throws TimeoutError, continue to run, and wait for it to finish + result = await delay_task + print("return value:", result) + +asyncio.run(main()) + +""" +start sleep 2 +timeout +whether the task is cancelled: False +end sleep +return value: 2 +""" +``` + +## simple aiohttp download demo + +```python +import asyncio +import os + +import aiohttp + + +async def download_img(session, url): + file_name = os.path.basename(url) + print(f"Downloading:{file_name}") + response = await session.get(url, ssl=False) + content = await response.content.read() + with open(file_name, mode="wb") as file: + file.write(content) + print(f"Done:{file_name}") + + +async def main(): + urls = [ + "https://tenfei05.cfp.cn/creative/vcg/800/new/VCG41560336195.jpg", + "https://tenfei03.cfp.cn/creative/vcg/800/new/VCG41688057449.jpg", + ] + async with aiohttp.ClientSession() as session: + # download_img(session, url) returns a coroutine + tasks = [asyncio.create_task(download_img(session, url)) for url in urls] + await asyncio.wait(tasks) + + +# loop = asyncio.get_event_loop() +# loop.run_until_complete(main()) + +# above commented 2 lines are low level API and could be replaced by +# below asyncio.run() introduced by python 3.7. +# asyncio.get_event_loop() creates new event loop if doesn't exist. +# asyncio.run() raises exception if already in a event loop. 
+# This function always creates a new event loop and closes it at the end. +# It should be used as a main entry point for asyncio programs, and should +# ideally only be called once. +asyncio.run(main()) +``` + +## aiohttp rate limit example + + + +## run coroutines concurrently as asyncio Tasks + +await coroutines directly will run the coroutines sequentially, so 2 sleeps of 2s takes 4s: + +```python +import asyncio +import time + +print(f"started at {time.strftime('%X')}") +await asyncio.sleep(2) +await asyncio.sleep(2) +print(f"started at {time.strftime('%X')}") + +# output, duration 4s +started at 23:48:19 +started at 23:48:23 +``` + +Wrap the coroutines into tasks to run concurrently, 2 sleeps of 2s takes 2s: + +```python +import asyncio +import time + +print(f"started at {time.strftime('%X')}") + +# create_task() must be inside a running event loop, +# often created by asyncio.run() +task1 = asyncio.create_task(asyncio.sleep(2)) +task2 = asyncio.create_task(asyncio.sleep(2)) + +await task1 +await task2 +# or: await asyncio.wait([task1, task2]) + +print(f"started at {time.strftime('%X')}") + +# output, duration 2s +started at 23:49:08 +started at 23:49:10 +``` + +## schedule task without asyncio.create_task + +The popular asyncio tasks usage is : + +```python +import asyncio +import time + +async def main() + start = time.time() + tasks = [ + asyncio.create_task(asyncio.sleep(2)), + asyncio.create_task(asyncio.sleep(2)), + ] + await asyncio.wait(tasks) + print(time.time() - start) + +asyncio.run(main()) + +# output +2.0010249614715576 +``` + +`asyncio.create_task()` must be run inside a event loop, which is created by `asyncio.run()`. We can also not use `asyncio.create_task()` to create tasks too: + +```python +import asyncio +import time + +coroutines = [ + asyncio.sleep(2), + asyncio.sleep(2) +] + +start = time.time() + +# asyncio.run() creates an event loop, +# then asyncio.wait() wraps the coroutines into tasks. 
+asyncio.run(asyncio.wait(coroutines)) + +print(time.time() - start) + +# output +2.0026962757110596 +``` + +## wait vs gather + +- [`wait`](https://docs.python.org/3/library/asyncio-task.html#asyncio.wait) is a low-level api, [`gather`](https://docs.python.org/3/library/asyncio-task.html#asyncio.gather) is a high-level api. +- `wait` has more options than `gather`: + - `async def wait(fs, *, loop=None, timeout=None, return_when=ALL_COMPLETED):` + - `def gather(*coros_or_futures, loop=None, return_exceptions=False):` +- `wait` accepts lists of coroutines/Futures (`asyncio.wait(tasks)`), `gather` accepts each element a coroutine/Futures (`asyncio.gather(*tasks)`). +- `wait` returns two `futures` in a tuple: `(done, pending)`, it's a coroutine `async def`. To get the `wait` results: `[d.result() for d in done]`, `gather` returns the results directly, it's a standard `def`. +- `gather` can group tasks, and can also cancel groups of tasks: + + ```python + async def main(): + group1 = asyncio.gather(f1(), f1()) + group2 = asyncio.gather(f2(), f2()) + group1.cancel() + # if return_exceptions=False, `asyncio.exceptions.CancelledError` will be raised, + # if return_exceptions=True, the exception will be returned in the results. + # return_exceptions default value is False + all_groups = await asyncio.gather(group1, group2, return_exceptions=True) + print(all_groups) + ``` + +- If the `wait` task is cancelled, it simply throws an CancelledError and the waited tasks remain intact. Need to call `task.cancel()` to cancel the remaining tasks. If `gather` is cancelled, all submitted awaitables (that have not completed yet) are also cancelled. 
+ +## task.add_done_callback + +```python +import asyncio +from asyncio import Future +from functools import partial + + +async def f1(): + await asyncio.sleep(2) + return "f1" + + +def callback1(future: Future): + print(future.result()) + print("this is callback1") + + +def callback2(t1, future: Future): + print(t1) + print(future.result()) + + +async def main(): + + task1 = asyncio.create_task(f1()) + + # bind callback1 to task1 + task1.add_done_callback(callback1) + + # bind callback2 to task2 with param + task1.add_done_callback(partial(callback2, "this is param t1")) + + # await task1 + tasks = [task1] + await asyncio.wait(tasks) + + +asyncio.run(main()) +``` + +## run_until_complete vs run_forever + +`run_until_complete` is `run_forever` with `_run_until_complete_cb` as callback. + +```python +def _run_until_complete_cb(fut): + if not fut.cancelled(): + exc = fut.exception() + if isinstance(exc, (SystemExit, KeyboardInterrupt)): + # Issue #22429: run_forever() already finished, no need to + # stop it. + return + futures._get_loop(fut).stop() +``` + +## run_in_executor (or to_thread) to run un-asyncable functions + +`to_thread()` calls `loop = events.get_running_loop()` and `loop.run_in_executor()` internally, source code [here](https://github.com/python/cpython/blob/d7dc3d9455de93310ccde13ceafe84d426790a5c/Lib/asyncio/threads.py#L25): + +```python +import asyncio +import time +from concurrent.futures import ThreadPoolExecutor + + +# non asyncable function, will be wrapped into async task by loop.run_in_executor() +def download_img(url): + print(f"Downloading:{url}") + time.sleep(1) + print(f"Downloaded:{url}") + + +async def main(): + executor = ThreadPoolExecutor(2) + + loop = asyncio.get_running_loop() + tasks = [] + for i in range(10): + # ThreadPoolExecutor is also the default executor, set None to use it. 
+ # t = loop.run_in_executor(None, download_img, i) + t = loop.run_in_executor(executor, download_img, i) + tasks.append(t) + + await asyncio.wait(tasks) + + +asyncio.run(main()) +``` + +`run_in_executor()` calls [`ThreadPoolExecutor` by default], and can also use `ProcessPoolExecutor`, source code [here](https://github.com/python/cpython/blob/d7dc3d9455de93310ccde13ceafe84d426790a5c/Lib/asyncio/base_events.py#L835): + +```python +# asyncio.base_events.py +def run_in_executor(self, executor, func, *args): + self._check_closed() + if self._debug: + self._check_callback(func, 'run_in_executor') + if executor is None: + executor = self._default_executor + # Only check when the default executor is being used + self._check_default_executor() + if executor is None: + executor = concurrent.futures.ThreadPoolExecutor( + thread_name_prefix='asyncio' + ) + self._default_executor = executor + return futures.wrap_future( + executor.submit(func, *args), loop=self) +``` diff --git a/docs/posts/2023/2023-09-19-github-actions-cache.md b/docs/posts/2023/2023-09-19-github-actions-cache.md new file mode 100644 index 00000000..0de42e92 --- /dev/null +++ b/docs/posts/2023/2023-09-19-github-actions-cache.md @@ -0,0 +1,102 @@ +--- +authors: +- copdips +categories: +- cicd +- github +- cache +- azure +comments: true +date: + created: 2023-09-19 +description: '' +--- + +# Github Actions: Cache + +## Life span + +Github Actions cache has a life span of [7 days](https://github.com/actions/toolkit/tree/main/packages/cache#actionscache), and the total size of all caches in a repository is limited to 10 GB. + +## Standard Cache + +Cache key should be as specific as possible, so that the post cache restore installation can be reduced or skipped. 
+ +For Python pip install, we could use the following cache key: + +```yaml +- name: Get pip cache dir + run: | + os_version=$(cat /etc/os-release | grep -i "version=" | cut -c9- | tr -d '"' | tr ' ' '_') + github_workflow_full_path="${GITHUB_WORKFLOW_REF%@*}" + python_full_version=$(python -c 'import platform; print(platform.python_version())') + node_major_version=$(node --version | cut -d'.' -f1 | tr -d 'v') + echo "os_version=$os_version" >> $GITHUB_ENV + echo "github_workflow_full_path=$github_workflow_full_path" >> $GITHUB_ENV + echo "python_full_version=$python_full_version" >> $GITHUB_ENV + echo "PIP_CACHE_DIR=$(pip cache dir)" >> $GITHUB_ENV + +- name: cache pip + uses: actions/cache@v3 + with: + # path: ${{ env.PIP_CACHE_DIR }} + path: ${{ env.pythonLocation }} + key: ${{ env.github_workflow_full_path}}-${{ env.os_version }}-${{ env.python_full_version }}-${{ env.node_major_version}}-${{ hashFiles('requirements/*.txt') }} +``` + +The `cache` action repository provides also some [Python caching examples](https://github.com/actions/cache/blob/main/examples.md#python---pip). + +### pip cache dir vs pip install dir + +The `path` parameter in `actions/cache@v3` could be: + +- `${{ env.PIP_CACHE_DIR }}` if you only want to cache the pip cache dir, so you can skip the Python package download step, but you still need to install the packages. +- `${{ env.pythonLocation }}` if you want to cache the whole python installation dir, this is useful when you want to cache the `site-packages` dir, so that the `pip install` step can be reduced or skipped, this is also why we must use the `${{ env.os_version }}`, `${{ env.python_full_version }}` in the cache key. In most of cases, this is the best choice. 
+ +### hashFiles + +In [Azure Pipelines](https://learn.microsoft.com/en-us/azure/devops/pipelines/release/caching?view=azure-devops), there's similar thing as [hashFiles()](https://docs.github.com/en/actions/learn-github-actions/expressions#hashfiles) function, it should be in the form of glob pattern, like `requirements/*.txt`, but without double quotes, otherwise treated as a static string. + +```yaml +# Azure Pipelines +- task: Cache@2 + inputs: + key: 'python | "$(pythonFullVersion)" | "$(osVersion)" | "$(System.TeamProject)" | "$(Build.DefinitionName)" | "$(Agent.JobName)" | requirements/*.txt' + path: ... + displayName: ... +``` + +Otherwise, we can also achieve the same result by some pure bash commands: + +```yaml +# suppose parameters.requirementsFilePathList is a list of file paths +- script: | + echo REQUIREMENTS_FILE_PATH_LIST_STRING: $REQUIREMENTS_FILE_PATH_LIST_STRING + all_files_in_one_line=$(echo $REQUIREMENTS_FILE_PATH_LIST_STRING | jq '. | join(" ")' -r) + echo all_files_in_one_line: $all_files_in_one_line + all_files_md5sum=$(cat $all_files_in_one_line | md5sum | awk '{print $1}') + echo all_files_md5sum: $all_files_md5sum + echo "##vso[task.setvariable variable=pythonRequirementsFilesHash;]$all_files_md5sum" + displayName: Set pythonRequirementsFilesHash + env: + REQUIREMENTS_FILE_PATH_LIST_STRING: "${{ convertToJson(parameters.requirementsFilePathList) }}" +``` + +## Cache with actions/setup-python + +The action [actions/setup-python](https://github.com/actions/setup-python#caching-packages-dependencies) has built-in functionality for caching and restoring dependencies with `cache` key. This cache method can only cache the pip cache dir to reduce the Python packages download time like `path: ${{ env.PIP_CACHE_DIR }}` in above [example](#standard-cache), but still need to install the packages, which is [much slower than caching the package installation location](#pip-cache-dir-vs-pip-install-dir). 
At the time of writing, the cache source dir (which is the pip cache dir) is generated by the action itself, and cannot be customized.
This is very useful when you need to [reference some files or scripts](https://stackoverflow.com/a/73839061/5095636) saved in the same repository as the actions. + +```yaml + + +```bash + +Actions in workflow: + +```yaml +- name: Check out repository code + uses: actions/checkout@v4 + +- name: Use action in the version of the main branch + uses:{org_name}/{repo_name}/actions/{action_path}@main + +- name: Use action in the version of v1 + uses:{org_name}/{repo_name}/actions/{action_path}@v1 +``` + +Actions checkout location: + +```bash +../../_actions/actions/checkout +├── v4 +│ ├── CHANGELOG.md +│ ├── CODEOWNERS +│ ├── ... + +../../_actions/{org_name}/{repo_name} +├── main +│ ├── README.md +│ └── actions +│ └── ... +├── main.completed +├── v1 +│ ├── README.md +│ └── actions +│ └── ... +└── v1.completed +``` + +## Multiple actions in single repository + +You can save multiple actions inside a single repository, and use them in the form of [`uses: org/repo/folder_path@git_ref`](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#example-using-a-public-action-in-a-subdirectory) in a workflow. + +## azure/CLI + +Benefits of using azure/CLI over run task: + +1. azure/CLI runs `az` commands in an isolated docker container. +2. azure/CLI can choose the CLI version. +3. For some self-hosted runner, may not have "az cli" pre-installed, the Azure/CLI action eliminates the need for complex installation steps. + +Can also set [shared variables inside a job](https://copdips.com/2023/09/github-actions-variables.html#passing-data-between-steps-inside-a-job) to be used outside the azure/CLI step, even it's run inside a docker container. + +Drawbacks: + +1. slowness: `azure/CLI` is much slower (around 20s to bootstrap on a ubuntu-latest-4core runner) than standard `run` step, because it needs to pull the docker image and run the container. 
diff --git a/docs/posts/2023/2023-09-19-github-actions-environment.md b/docs/posts/2023/2023-09-19-github-actions-environment.md new file mode 100644 index 00000000..831477b4 --- /dev/null +++ b/docs/posts/2023/2023-09-19-github-actions-environment.md @@ -0,0 +1,49 @@ +--- +authors: +- copdips +categories: +- cicd +- github +comments: true +date: + created: 2023-09-19 +description: '' +--- + +# Github Actions: Environment + +## Dynamic environment + +[environment](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment#using-an-environment) is set at job level (not at step level), so we should use the `$GITHUB_OUTPUT` context to set the environment name dynamically, see [here](https://copdips.com/2023/09/github-actions-variables.html#passing-variables) to learn how to pass data between jobs. + +Standard usage for static value is like this: + +```yaml +jobs: + deployment: + runs-on: ubuntu-latest + environment: production + steps: + - name: deploy + # ...deployment-specific steps +``` + +For advanced usage with dynamic value should be like this: + +```yaml +# call reusable workflow set_target_env.yml to set the target_env +jobs: + set_target_env: + uses: ./.github/workflows/set_target_env.yml + deployment: + runs-on: ubuntu-latest + needs: [set_target_env] + environment: + name: ${{ needs.set_target_env.outputs.workflow_output_target_env }} + env: + TARGET_ENV: ${{ needs.set_target_env.outputs.workflow_output_target_env }} + steps: + - run: | + echo "TARGET_ENV: $TARGET_ENV" + # ...other deployment-specific steps based on $TARGET_ENV +``` diff --git a/docs/posts/2023/2023-09-19-github-actions-variables.md b/docs/posts/2023/2023-09-19-github-actions-variables.md new file mode 100644 index 00000000..afd63a71 --- /dev/null +++ b/docs/posts/2023/2023-09-19-github-actions-variables.md @@ -0,0 +1,328 @@ +--- +authors: +- copdips +categories: +- cicd +- github +comments: true +date: + created: 2023-09-19 
+description: '' +--- + +# Github Actions: Variables + +## Variables upon Git events + +Suppose we create a new branch named `new_branch`, and create a pull request (with id `123`) from the new branch `new_branch` to the `main` branch. +During the pipeline, we can see following predefined variables in different GIT events. + +!!! note + + Check [here](https://copdips.com/2022/01/azure-pipeline-predefined-variables.html#variables-upon-git-events) for variables upon git events in Azure Pipelines. + +| variable name \ git action | on push | on pull request | on merge (after merge, a push event will be triggered) | on manual trigger | +| ------------------------------------------------- | --------------------- | -------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------ | --------------------- | +| $GITHUB_REF | refs/heads/new_branch | refs/pull/123/merge | refs/heads/main | refs/heads/new_branch | +| $GITHUB_REF_NAME | new_branch | 132/merge | main | new_branch | +| $GITHUB_EVENT_NAME | push | pull_request | pull_request_target | workflow_dispatch | +| $GITHUB_REF_TYPE | branch | branch | branch | branch | +| $GITHUB_SHA | last commit in branch | workflow commit (not merge commit) | merge commit | last commit in branch | +| ${{ github.event.head_commit.message }} | last commit message | VAR_NOT_EXISTS | VAR_NOT_EXISTS | VAR_NOT_EXISTS | +| ${{ github.event.pull_request.merge_commit_sha }} | VAR_NOT_EXISTS | merge commit | merge commit | VAR_NOT_EXISTS | +| ${{ github.event.pull_request.head.sha }} | VAR_NOT_EXISTS | last commit in PR (not merge commit) | last commit in PR (not merge commit) | VAR_NOT_EXISTS | +| ${{ github.event.pull_request.number }} | VAR_NOT_EXISTS | 123 | 123 | VAR_NOT_EXISTS | +| ${{ github.event.number }} | VAR_NOT_EXISTS | 123 | 123 | VAR_NOT_EXISTS | +| ${{ github.event.pull_request.merged }} | 
VAR_NOT_EXISTS | false | true | VAR_NOT_EXISTS | +| ${{ github.event.pull_request.merged_by.login }} | VAR_NOT_EXISTS | null | user login | VAR_NOT_EXISTS | +| ${{ github.event.pull_request.merged_by.type }} | VAR_NOT_EXISTS | null | User, etc | VAR_NOT_EXISTS | +| ${{ github.event.pull_request.title }} | VAR_NOT_EXISTS | null or pr title | null or pr title | VAR_NOT_EXISTS | +| ${{ github.event.pull_request.body}} | VAR_NOT_EXISTS | null or pr body | null or pr bod | VAR_NOT_EXISTS | +| ${{ github.event.after }} | last SHA in commit | last commit in PR (not merge commit) | VAR_NOT_EXISTS | VAR_NOT_EXISTS | +| ${{ github.event.action}} | VAR_NOT_EXISTS | opened, synchronize, edited, reopned, [etc](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request).. | closed | VAR_NOT_EXISTS | +| ${{ github.head_ref }} | VAR_NOT_EXISTS | new_branch | new_branch | VAR_NOT_EXISTS | +| ${{ github.base_ref }} | null | main | main | VAR_NOT_EXISTS | + +## Setting environment variables by Python + +Same approach applies to other languages: + +```yaml +- name: Create new env vars by Python + shell: python + run: | + import os + with open(os.environ["GITHUB_ENV"], "a") as f: + f.write("ENV_VAR_1=value_1\nENV_VAR_2=value_2\n") +``` + +## JSON Variables + +### JSON variables with GITHUB_OUTPUT + +When setting a JSON variable in string as `$GITHUB_OUTPUT`, and using it in a subsequent step, we should use the Github actions expressions syntax. However, the method of using this syntax can vary based on its context. 
Consider the following example on a Github Ubuntu runner with a bash shell: + +```yaml +- name: Write json outputs + id: write-json-outputs + run: | + json_raw='{"name":"foo"}' + json_quotes_escaped="{\"name\":\"foo\"}" + json_quotes_backslash_escaped="{\\\"name\\\":\\\"foo\\\"}" + json_ascii="{\x22name\x22: \x22foo\x22}" + + echo "json_raw=$json_raw" >> $GITHUB_OUTPUT + echo "json_quotes_escaped=$json_quotes_escaped" >> $GITHUB_OUTPUT + echo "json_quotes_backslash_escaped=$json_quotes_backslash_escaped" >> $GITHUB_OUTPUT + echo -e "json_ascii=$json_ascii" >> $GITHUB_OUTPUT + + echo "GITHUB_OUTPUT content:" + cat $GITHUB_OUTPUT + +- name: Show json outputs + run: | + json_raw_wo_quotes=${{ steps.write-json-outputs.outputs.json_raw }} + json_raw="${{ steps.write-json-outputs.outputs.json_raw }}" + json_quotes_escaped="${{ steps.write-json-outputs.outputs.json_quotes_escaped }}" + json_quotes_backslash_escaped="${{ steps.write-json-outputs.outputs.json_quotes_backslash_escaped }}" + json_ascii="${{ steps.write-json-outputs.outputs.json_ascii }}" + + # echo vars from templating inside bash + echo "json_raw_wo_quotes: $json_raw_wo_quotes" + echo "json_raw: $json_raw" + echo "json_quotes_escaped: $json_quotes_escaped" + echo "json_quotes_backslash_escaped: $json_quotes_backslash_escaped" + echo "json_ascii: $json_ascii" + + # echo vars from env variables + echo "JSON_RAW: $JSON_RAW" + echo "JSON_QUOTES_ESCAPED: $JSON_QUOTES_ESCAPED" + echo "JSON_QUOTES_BACKSLASH_ESCAPED: $JSON_QUOTES_BACKSLASH_ESCAPED" + echo "JSON_QUOTES_BACKSLASH_ESCAPED_TO_JSON: $JSON_QUOTES_BACKSLASH_ESCAPED_TO_JSON" + echo "JSON_QUOTES_BACKSLASH_ESCAPED_WITH_QUOTES: $JSON_QUOTES_BACKSLASH_ESCAPED_WITH_QUOTES" + echo "JSON_ASCII: $JSON_ASCII" + env: + JSON_RAW: ${{ steps.write-json-outputs.outputs.json_raw }} + JSON_QUOTES_ESCAPED: ${{ steps.write-json-outputs.outputs.json_quotes_escaped }} + JSON_QUOTES_BACKSLASH_ESCAPED: ${{ steps.write-json-outputs.outputs.json_quotes_backslash_escaped }} + 
JSON_QUOTES_BACKSLASH_ESCAPED_TO_JSON: ${{ toJson(steps.write-json-outputs.outputs.json_quotes_backslash_escaped) }} + JSON_QUOTES_BACKSLASH_ESCAPED_WITH_QUOTES: "${{ steps.write-json-outputs.outputs.json_quotes_backslash_escaped }}" + JSON_ASCII: ${{ steps.write-json-outputs.outputs.json_ascii }} +``` + +!!! note + + When creating the json string, it would be better to not use blank spaces between keys and values, `json_raw='{"name":"foo"}'` instead of `json_raw='{"name": "foo"}`, in order to prevent from bash variable mangling issue. + +We have the following output: + +```bash +Write json outputs + GITHUB_OUTPUT content: + json_raw={"name":"foo"} + json_quotes_escaped={"name":"foo"} + json_quotes_backslash_escaped={\"name\":\"foo\"} + json_ascii={"name":"foo"} + +Show json outputs + json_raw_wo_quotes: {name:foo} + json_raw: {name:foo} + json_quotes_escaped: {name:foo} + json_quotes_backslash_escaped: {"name":"foo"} + json_ascii: {name:foo} + JSON_RAW: {"name":"foo"} + JSON_QUOTES_ESCAPED: {"name":"foo"} + JSON_QUOTES_BACKSLASH_ESCAPED: {\"name\":\"foo\"} + JSON_QUOTES_BACKSLASH_ESCAPED_TO_JSON: "{\\\"name\\\":\\\"foo\\\"}" + JSON_QUOTES_BACKSLASH_ESCAPED_WITH_QUOTES: {\"name\":\"foo\"} + JSON_ASCII: {"name":"foo"} +``` + +From the output we can see that there're two ways to have a valid json string in the show step: + +```yaml +- name: Show json outputs + run: | + json_quotes_backslash_escaped="${{ steps.write-json-outputs.outputs.json_quotes_backslash_escaped }}" + echo "json_quotes_backslash_escaped: $json_quotes_backslash_escaped" + + # echo vars from env + echo "JSON_RAW: $JSON_RAW" + echo "JSON_QUOTES_ESCAPED: $JSON_QUOTES_ESCAPED" + echo "JSON_ASCII: $JSON_ASCII" + env: + JSON_RAW: ${{ steps.write-json-outputs.outputs.json_raw }} + JSON_QUOTES_ESCAPED: ${{ steps.write-json-outputs.outputs.json_quotes_escaped }} + JSON_ASCII: ${{ steps.write-json-outputs.outputs.json_ascii }} +``` + +Creating a JSON string in GITHUB_OUTPUT without escaping backslashes, like 
`json_quotes_escaped="{\"name\":\"foo\"}"`, is more concise than `"{\\\"name\\\":\\\"foo\\\"}"`. However, when using `${{ }}` in a bash shell within GitHub Actions, it's not a valid JSON string. This is because the expressions are processed before the bash shell runs the script, replacing the expression with its value and discarding double quotes. This results in an output like `json_raw: {name:foo}`. To address this, the [`toJson`](https://docs.github.com/en/enterprise-cloud@latest/actions/learn-github-actions/expressions#tojson) function can be used to convert the string into valid JSON. + +```yaml +- name: Show json outputs + run: | + # use toJson() to parse the string to a valid json string + json_raw="${{ toJson(steps.write-json-outputs.outputs.json_raw) }}" + json_quotes_escaped="${{ toJson(steps.write-json-outputs.outputs.json_quotes_escaped) }}" + json_ascii="${{ toJson(steps.write-json-outputs.outputs.json_ascii) }}" + + # echo vars from templating inside bash + echo "json_raw: $json_raw" + echo "json_quotes_escaped: $json_quotes_escaped" + echo "json_ascii: $json_ascii" + + # echo vars from env variables + echo "JSON_RAW: $JSON_RAW" + echo "JSON_QUOTES_ESCAPED: $JSON_QUOTES_ESCAPED" + echo "JSON_ASCII: $JSON_ASCII" + env: + JSON_RAW: ${{ steps.write-json-outputs.outputs.json_raw }} + JSON_QUOTES_ESCAPED: ${{ steps.write-json-outputs.outputs.json_quotes_escaped }} + JSON_ASCII: ${{ steps.write-json-outputs.outputs.json_ascii }} +``` + +!!! note + + Check also the [`fromJson`](https://docs.github.com/en/actions/learn-github-actions/expressions#fromjson) function to see how to parse json string to object. + +### Do not create JSON secrets + +When creating a secret, we should not create a JSON secret. For e.g. 
the Github action `Azure/Login` provides [an example how to pass creds inputs with a JSON secret](https://github.com/Azure/login#sample-workflow-that-uses-azure-login-action-to-run-az-cli): + +```yaml +- uses: azure/login@v1 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} +``` + +This works but the drawback is that as the curly brackets are stored in the JSON secret, so whenever we want to show `{` or `}` in the Github action logs, they will be replaced by `***`, as Github actions considers the curly brackets are secret chars. This doesn't block the successful run of github workflows, but it's not convenient for debugging. + +A better usage of Azure/Login is also provided in its documentation [here](https://github.com/Azure/login#configure-deployment-credentials): + +```yaml +- uses: Azure/login@v1 + with: + creds: '{"clientId":"${{ secrets.CLIENT_ID }}","clientSecret":"${{ secrets.CLIENT_SECRET }}","subscriptionId":"${{ secrets.SUBSCRIPTION_ID }}","tenantId":"${{ secrets.TENANT_ID }}"}' +``` + +## Parsing variables + +### Parsing variables with object type + +```yaml +- run: | + echo "github.event: ${{ github.event }}" + echo "github.event toJson: $GITHUB_EVENT" + env: + GITHUB_EVENT: ${{ toJson(github.event) }} + +# output: +github.event: Object +github.event toJson: { + after: 9da8166fcc52c437871a2e903b3e200a35c09a1e, + base_ref: null, + before: 1448cfbf10fc149b7d200d0a0e15493f41cc8896, + ... +} +``` + +!!! warning + + `echo "github.event toJson: ${{ toJSON(github.event) }}"` will [raise error](https://github.com/actions/runner/issues/1656#issuecomment-1030077729), must parse the variable to environment variable `$GITHUB_EVENT` at first. So when using `toJson` method to parse object type variable, it is recommended to send the value to an environment variable first. 
+ +### Parsing variables with boolean type + +Check with `if`: + +```yaml +on: + workflow_dispatch: + inputs: + print_tags: + description: 'True to print to STDOUT' + required: true + type: boolean + +jobs: + print-tag: + runs-on: ubuntu-latest + # all the 4 syntaxes below are valid + if: inputs.print_tags + if: ${{ inputs.print_tags }} + if: inputs.print_tags == true + if: ${{ inputs.print_tags == true}} + steps: + - name: Print the input tag to STDOUT + run: echo The tags are ${{ inputs.tags }} + - name: Print the input tag to STDOUT + # in bash, compare boolean with string value + run: | + if [[ "${{ inputs.print_tags }}" == "true" ]]; then + echo The tags are ${{ inputs.tags }} + else + echo "print_tags is false" + fi + if [[ "$PRINT_TAGS" == "true" ]]; then + echo The tags are ${{ inputs.tags }} + else + echo "print_tags is false" + fi + env: + PRINT_TAGS: ${{ inputs.print_tags }} +``` + +!!! warning + + Never use `if: ${{ inputs.print_tags }} == false` with `==` outside of `{{}}`, it will always be true. + +## Passing variables + +### Passing data between steps inside a job + +#### Passing by $GITHUB_ENV between steps + +You can make an environment variable available to any subsequent steps in a workflow job by defining or updating the environment variable and writing this to the [GITHUB_ENV](https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-environment-variable) environment file. + +```yaml +- run: echo "var_1=value1" >> $GITHUB_ENV +- run: echo "var_1: $var1" +``` + +#### Passing by $GITHUB_OUTPUT between steps + +Sets a step's [output parameter](https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-output-parameter). 
Note that the step needs an `id` to be defined in order to later retrieve the output value.
+ +### Passing data between irrelevant workflows + +- [https://github.com/actions/download-artifact/issues/3#issuecomment-580658517](https://github.com/actions/download-artifact/issues/3#issuecomment-580658517) +- [https://github.com/actions/download-artifact/issues/3#issuecomment-1017141067](https://github.com/actions/download-artifact/issues/3#issuecomment-1017141067) +- [https://github.com/dawidd6/action-download-artifact](https://github.com/dawidd6/action-download-artifact) diff --git a/docs/posts/2023/2023-09-20-github-actions-error-handling.md b/docs/posts/2023/2023-09-20-github-actions-error-handling.md new file mode 100644 index 00000000..aed14a25 --- /dev/null +++ b/docs/posts/2023/2023-09-20-github-actions-error-handling.md @@ -0,0 +1,30 @@ +--- +authors: +- copdips +categories: +- cicd +- github +comments: true +date: + created: 2023-09-20 +description: '' +--- + +# Github Actions : Error handling + +## continue-on-error vs fail-fast + +The [doc](https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs#handling-failures) explains that `continue-on-error` applies to a [single job](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idcontinue-on-error) or [single step](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepscontinue-on-error) which defines whether a job or step can continue on its error, while `fail-fast` applies to the entire matrix which means if the failure of a job in the matrix can stop other running jobs in the matrix. For example: + +- if `fail-fast` is set to `true`, the entire matrix will stop running when one job fails. But if the failed job has `continue-on-error` set to `true`, the matrix will continue running, as the failed job is not considered as a failure. +- if `fail-fast` is set to `false`, all the jobs triggered by the matrix are considered independent, so the failed job will not affect other jobs. + +!!! 
note + + When setting `continue-on-error` at [job level](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idcontinue-on-error) only, and no set at [step level](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepscontinue-on-error), if one of the steps fails, the remaining steps wont be executed, the job will get a red failure badge in the Github Actions UI, but the job status will be considered as success. + +## Status check functions + +We can also use [status check functions](https://docs.github.com/en/actions/learn-github-actions/expressions#status-check-functions) `if ${{ success() }}, if: ${{ always() }}, if: ${{ cancelled() }}, if: ${{ failure() }}` to check the previous step (**or job**) status. + +In `if` expression, we can skip the double curly brackets `${{}}`, for example: `if: success()` instead of `if: ${{ success() }}` diff --git a/docs/posts/2023/2023-09-21-github-actions-workflows.md b/docs/posts/2023/2023-09-21-github-actions-workflows.md new file mode 100644 index 00000000..ed37ad2b --- /dev/null +++ b/docs/posts/2023/2023-09-21-github-actions-workflows.md @@ -0,0 +1,32 @@ +--- +authors: +- copdips +categories: +- cicd +- github +comments: true +date: + created: 2023-09-21 +description: '' +--- + +# Github Actions: Workflows + +## Reusable workflows + +### Re-run a reusable workflow + +[If reusable workflow is not referenced by SHA](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-workflows-and-jobs-with-reusable-workflows), for example a branch name, when re-run a workflow, it will not use the latest version of the workflow in that branch, but the same commit SHA of the first attempt. Which means, if you use the git amend push to overwrite the old commit history, the workflow re-run will fail as it cannot find the specific SHA version of the workflow. 
+ +In contrary, if an action is referenced by branch name, it will always use the latest version of the action in that branch upon re-run. + +## Cancelling a workflow + +To cancel the current workflow run inside the run itself: + +```yaml +- name: cancelling + uses: andymckay/cancel-action@0.3 +``` + +We can use `if: cancelled()` or `if: always()` to bypass the workflow cancel signal. diff --git a/docs/posts/2023/2023-09-22-databricks-python-pip-authentication.md b/docs/posts/2023/2023-09-22-databricks-python-pip-authentication.md new file mode 100644 index 00000000..300e5f1e --- /dev/null +++ b/docs/posts/2023/2023-09-22-databricks-python-pip-authentication.md @@ -0,0 +1,21 @@ +--- +authors: +- copdips +categories: +- databricks +- python +- pip +- auth +comments: true +date: + created: 2023-09-22 +description: '' +--- + +# Databricks Python pip authentication + +Before the Databricks Unit Catalog's release, we used init scripts to generate the `pip.conf` file during cluster startup, allowing each cluster its unique auth token. But with init scripts no longer available in the Unit Catalog's **shared mode**, an alternative approach is required. + +A workaround involves placing a prepared `pip.conf` in the Databricks workspace and setting the `PIP_CONFIG_FILE` environment variable to point to this file. This method, however, presents security concerns: the `pip.conf` file, containing the auth token, becomes accessible to the entire workspace, potentially exposing it to all users and clusters. See [here](https://github.com/databrickslabs/dbx/issues/739#issuecomment-1730308586) to check this workaround. + +In contrast, the Unit Catalog's **single mode** retains init script availability. Here, the pip auth token is stored securely in a vault and accessed via the Databricks secret scope. Upon cluster startup, the init script fetches the token from the vault, generating the `pip.conf` file. This approach is considerably more secure than the shared mode alternative. 
diff --git a/docs/posts/2023/2023-09-22-github-actions-python.md b/docs/posts/2023/2023-09-22-github-actions-python.md new file mode 100644 index 00000000..e2c3af90 --- /dev/null +++ b/docs/posts/2023/2023-09-22-github-actions-python.md @@ -0,0 +1,145 @@ +--- +authors: +- copdips +categories: +- cicd +- github +- python +- pip +- auth +- azure +comments: true +date: + created: 2023-09-22 +description: '' +--- + +# Github Actions: Python + +## Setting up pip authentication + +### PIP_INDEX_URL vs PIP_EXTRA_INDEX_URL + +In most cases, when setting up private Python package artifacts (like Azure DevOps Artifacts, JFrog Artifactory, etc.) are configured to mirror the public PyPi. In such scenarios, we only need to use `PIP_INDEX_URL` to point to these private artifacts. + +However, some people might use `PIP_INDEX_URL` point to the public PyPi, and `PIP_EXTRA_INDEX_URL` to point to the private artifacts. This approach is not recommended, as it results in the public PyPi searched first, followed by the private artifacts. This poses a security risk where a malicious actor can publish a package with the same name as your private one on the public PyPi. + +### Auth for Azure DevOps Artifacts + +#### Auth by Azure SPN crendentials + +In March 2023, there was a great news that Azure Service Principal was been [introduced in Azure DevOps](https://learn.microsoft.com/en-us/azure/devops/release-notes/2023/sprint-219-update#service-principal-and-managed-identity-support-in-azure-devops-public-preview), eliminating the use of service account. + +1. Create a service principal in Azure Active Directory. +2. Add the service principal to the Azure DevOps Artifacts feed with `Contributor` role. Package publishing (twine upload) needs `Contributor` role, but package installation (pip install) only needs `Reader` role. +3. 
Add SPN credentials to Github Secrets with name `AZURE_CREDENTIALS`, and value in JSON format: + + ```json + { + "clientId": "xxxxx", + "clientSecret": "xxxxx", + "subscriptionId": "xxxxx", + "tenantId": "xxxxx" + } + ``` + +4. Create env var `PIP_INDEX_URL` in the workflow, and set it to the Azure DevOps Artifacts feed URL. + + ```yaml + - uses: actions/checkout@v4 + + + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + # see below post of a faster Python cache: + # https://copdips.com/2023/09/github-actions-cache.html#pip-cache-dir-vs-pip-install-dir + cache: pip + cache-dependency-path: requirements/*.txt + + - name: Azure Login + uses: azure/login@v1 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + + - name: Setup Python package feed + run: | + access_token=$(az account get-access-token | jq .accessToken -r) + + # setup pip auth + echo "PIP_INDEX_URL=https://:$access_token@pkgs.dev.azure.com/{azdo_org_name}/_packaging/{azdo_artifacts_feed_name}/pypi/simple/" >> $GITHUB_ENV + + # setup twine auth + cat > ~/.pypirc <> $GITHUB_ENV + + - name: Install dependencies + run: | + pip install -U pip + pip install -r requirements/requirements.txt + + - name: Build Python package + run: | + # need to install wheel in advance + python setup.py sdist bdist_wheel + # modern Python uses `python -m build` instead + + # alternative Python package build and check + - name: Build and Check Package + uses: hynek/build-and-inspect-python-package@v1.5 + + - name: Publish Python package by twine + run: | + # need to install twine in advance + twine upload -r {azdo_artifacts_feed_name} dist/*.whl + + # alternative Python package publish + - name: Publish Python package by action + # does not need to install twine in advance + uses: pypa/gh-action-pypi-publish@release/v1 + with: + repository-url: "https://pkgs.dev.azure.com/{azdo_org_name}/_packaging/{azdo_artifacts_feed_name}/pypi/upload" + password: ${{ env.ACCESS_TOKEN }} 
+ + - name: Cleanup secret envs + run: | + echo "PIP_INDEX_URL=" >> $GITHUB_ENV + echo "ACCESS_TOKEN=" >> $GITHUB_ENV + ``` + +#### Auth by Azure OpenID Connect (OIDC) + +We can also [setup OpenID Connect (OIDC) between Github Action and Azure](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-azure). It's practical because we do not need to worry about Azure SPN secret rotation. However, a drawback is that when setting up OIDC, we must add a [filter](https://learn.microsoft.com/en-us/azure/developer/github/connect-from-azure?tabs=azure-cli%2Clinux#add-federated-credentials) (`subject` field in the `credential.json`). This could be a branch name, tag name, pull request, or environment name, we can not use wildcards in the filter, so we have to set up OIDC for each branch, tag, pull request or environment as needed. This is not very practical. For AWS, there's no such limitation. + +To use Azure OIDC with Github Action, we need to add the following to the workflow: + +```yaml +... +permissions: + id-token: write + contents: read + +jobs: + a_job: + ... + steps: + - name: Azure login by OIDC + uses: azure/login@v1 + with: + # Official doc puts these 3 fields in secrets, but it's not necessary, + # as `subject` field in the credential.json prevents other repos from + # using the same credential. And these are not sensitive info neither. 
+ tenant-id: ${{ vars.AZURE_TENANT_ID }} + subscription-id: ${{ vars.AZURE_SUBSCRIPTION_ID }} + client-id: ${{ vars.AZURE_CLIENT_ID }} +``` diff --git a/docs/posts/2023/2023-10-16-github-actions-get-azure-keyvault-secrets-action.md b/docs/posts/2023/2023-10-16-github-actions-get-azure-keyvault-secrets-action.md new file mode 100644 index 00000000..01255f15 --- /dev/null +++ b/docs/posts/2023/2023-10-16-github-actions-get-azure-keyvault-secrets-action.md @@ -0,0 +1,57 @@ +--- +authors: +- copdips +categories: +- cicd +- github +- python +- async +- azure +- vault +comments: true +date: + created: 2023-10-16 +description: '' +--- + +# Github Actions: copdips/get-azure-keyvault-secrets-action + +Recently, I began a new project that requires migrating some process from Azure Pipelines to Github Actions. One of the tasks involves retrieving secrets from Azure Key Vault. + +In Azure Pipelines, we have an official task called [AzureKeyVault@2](https://docs.microsoft.com/en-us/azure/devops/pipelines/tasks/deploy/azure-key-vault?view=azure-devops) designed for this purpose. However, its official counterpart in Github Actions, [Azure/get-keyvault-secrets@v1](https://github.com/Azure/get-keyvault-secrets), has been deprecated. The recommended alternative is [Azure CLI](https://github.com/Azure/get-keyvault-secrets#deprecation-notice). While Azure CLI is a suitable option, it operates in a bash shell without multithreading. If numerous secrets need to be fetched, this can be time-consuming. + +Over the past weekend, I decided to write my own action using Python, leveraging `asyncio`. I avoided any additional third party Python modules like `requests`, `aiohttp`, or `httpx`, so no pip install needed. + +As anticipated, the pure Python solution is notably faster than using the Azure CLI, and even surpasses the speed of the Azure Pipelines task `AzureKeyVault@2`. In my tests, it was able to retrieve the all the secrets from an Azure Key Vault within seconds. 
+ +The source code is at: [copdips/get-azure-keyvault-secrets-action](https://github.com/copdips/get-azure-keyvault-secrets-action) + +And hereunder is the usage: + +```yaml +# in the calling workflow, user should first login to Azure +- uses: Azure/login@v1 + with: + # creds: ${{secrets.AZURE_CREDENTIALS}} is not recommended due to json secrets security concerns. + creds: '{"clientId":"${{ secrets.CLIENT_ID }}","clientSecret":"${{ secrets.CLIENT_SECRET }}","subscriptionId":"${{ secrets.SUBSCRIPTION_ID }}","tenantId":"${{ secrets.TENANT_ID }}"}' + +- name: Get Azure KeyVault secrets + id: get-azure-keyvault-secrets + uses: copdips/get-azure-keyvault-secrets-action@v1 + with: + keyvault: {your_azure_keyvault_name} + +# Suppose there's a secret named client-secret in the Azure Key Vault, +# so an env var named CLIENT_SECRET should be created by the action. +# You won't see the secret value in the workflow log as it's masked by Github automatically. +- name: Use secrets from env var + run: | + echo $CLIENT_SECRET + echo ${{ env.CLIENT_SECRET }} + +- name: Use secrets from output + run: | + echo $JSON_SECRETS | jq .CLIENT_SECRET -r + env: + JSON_SECRETS: ${{ steps.get-azure-keyvault-secrets.outputs.json }} +``` diff --git a/docs/posts/2023/2023-10-21-hashing-files.md b/docs/posts/2023/2023-10-21-hashing-files.md new file mode 100644 index 00000000..2ec99aa0 --- /dev/null +++ b/docs/posts/2023/2023-10-21-hashing-files.md @@ -0,0 +1,78 @@ +--- +authors: +- copdips +categories: +- cicd +- github +- azure +- shell +- cache +comments: true +date: + created: 2023-10-21 +description: '' +--- + +# Hashing files + +During CI/CD processes, and particularly during CI, we frequently hash dependency files to create cache keys (referred to as `key` input in Github Action [actions/cache](https://github.com/actions/cache) and `key` parameter in Azure pipelines [Cache@2 task](https://learn.microsoft.com/en-us/azure/devops/pipelines/tasks/reference/cache-v2?view=azure-pipelines)). 
However, the default hash functions come with certain limitations like [this comment](https://github.com/orgs/community/discussions/25761#discussioncomment-6508758). To address this, we can use the following pure Bash shell command to manually generate the hash value. + +For Github Actions, we can use following snippet: + +```yaml +# github actions example +inputs: + req-files: + description: > + requirements files separated by comma or space, glob pattern is allowed. + e.g. "requirements/*.txt, requirements.txt" + required: true +runs: + using: "composite" + steps: + - name: Compute hash key + shell: bash + env: + REQ_FILES: ${{ inputs.req-files }} + run: | + files=$(echo "$REQ_FILES" | tr "," " " | while read pattern ; do ls $pattern; done) + files_sep_by_space="" + for file in $files; do + files_sep_by_space="$files_sep_by_space $(ls $file | tr '\n' ' ')" + done + files_sep_by_space=$(echo $files_sep_by_space | tr ' ' '\n' | sort | uniq | tr '\n' ' ') + files_hash=$(cat $files_sep_by_space | md5sum | awk '{print $1}') + echo "files_hash: $files_hash" +``` + +For Azure pipelines, the process is nearly identical to the above Github Action example. The only difference is that we first need to convert the `reqFiles` parameter from an object to a string. But if you set the parameter type to `string` (as in the Github Action), the process becomes identical. + +```yaml +# azure pipelines example +parameters: + - name: reqFiles + displayName: > + requirements files, glob pattern is allowed. + e.g.: + - requirements/*.txt + - requirements.txt + type: object + steps: + - script: | + req_files_pattern_string=$(echo "$REQ_FILES_JSON" | jq '. 
| join(",")' -r) + files=$(echo $req_files_pattern_string | tr "," " " | while read pattern ; do ls $pattern; done) + files_sep_by_space="" + for file in $files; do + files_sep_by_space="$files_sep_by_space $(ls $file | tr '\n' ' ')" + done + files_sep_by_space=$(echo $files_sep_by_space | tr ' ' '\n' | sort | uniq | tr '\n' ' ') + files_hash=$(cat $files_sep_by_space | md5sum | awk '{print $1}') + echo "files_hash: $files_hash" + displayName: Compute hash key + env: + REQ_FILES_JSON: "${{ convertToJson(parameters.reqFiles) }}" +``` + +!!! note + + When creating the cache key, we also need to include os version, the one provided by Github action and Azure pipelines environment vars are not precise enough, they do not give patch version number. We can generate the **full os version** by the following command `cat /etc/os-release | grep -i "version=" | cut -c9- | tr -d '"' | tr ' ' '_'`. diff --git a/docs/posts/2023/2023-11-08-github-actions-bash-shell-pipefail.md b/docs/posts/2023/2023-11-08-github-actions-bash-shell-pipefail.md new file mode 100644 index 00000000..38b1a538 --- /dev/null +++ b/docs/posts/2023/2023-11-08-github-actions-bash-shell-pipefail.md @@ -0,0 +1,47 @@ +--- +authors: +- copdips +categories: +- cicd +- github +- shell +comments: true +date: + created: 2023-11-08 +description: '' +--- + +# Github Actions: Bash shell -e -o pipefail + +Bash shell in Github actions by default is run with `-e -o pipefail` option. The full command used by Github actions is : + +```bash +shell: /usr/bin/bash --noprofile --norc -e -o pipefail {0} +``` + +`-o pipefail` means that if any command in a pipeline fails, that return code will be used as the return code of the whole pipeline. And due to `-e` option, this makes the shell exit immediately if a command within the script exits with a non-zero status (i.e., fails). This is a good thing, but it can be a problem if you want to ignore the return code of a command in a pipeline. 
And especially in Github Actions output, you cannot see the error message of the command that failed. Github Actions just shows a generic error message: "**Error: Process completed with exit code 1.**", which makes it hard to debug. + +For example, the following command: + +- will **succeed** even if `grep` does not find anything in a standard bash shell (your local VM for example) +- but will **fail** in Github actions bash shell with the error message "**Error: Process completed with exit code 1.**". + +```bash +echo "hello world" | grep "foo" | wc -l +``` + +If you do not care about the grep result, to bypass this, you can use `|| true` at the end of the `grep` command. This will make sure that the return code of the grep command is always 0. + +```bash +echo "hello world" | { grep "foo" || true ; } | wc -l +``` + +To verify current bash shell options, you can use the `set -o` command: + +```bash +set -o +# or +$ set -o | grep -E "errexit|pipefail" +errexit off +pipefail off +``` diff --git a/docs/posts/2023/2023-11-18-Some-nice-CICD-bash-common-scripts.md b/docs/posts/2023/2023-11-18-Some-nice-CICD-bash-common-scripts.md new file mode 100644 index 00000000..36d09dd0 --- /dev/null +++ b/docs/posts/2023/2023-11-18-Some-nice-CICD-bash-common-scripts.md @@ -0,0 +1,25 @@ +--- +authors: +- copdips +categories: +- cicd +- github +- azure +- shell +comments: true +date: + created: 2023-11-18 +description: '' +--- + +# Some nice cicd bash common scripts + +During CICD, we often have a large log output, so it might be nice to have some common scripts to help us to format the log output, so that we can easily find the information we need. + +Recently, when working with Sonar, I found that they have some scripts for such output formatting. 
+ +- Common scripts: [common.sh](https://github.com/SonarSource/sonarqube-quality-gate-action/blob/master/script/common.sh) +- Source common script: [`source "$(dirname "$0")/common.sh"`](https://github.com/SonarSource/sonarqube-quality-gate-action/blob/f9fe214a5be5769c40619de2fff2726c36d2d5eb/script/check-quality-gate.sh#L3C9-L3C9) +- Also the Bash testing framework Bats: + - [Installing Bats in CICD](https://github.com/SonarSource/sonarqube-quality-gate-action/blob/f9fe214a5be5769c40619de2fff2726c36d2d5eb/.github/workflows/run-qa.yml#L17-L28) + - [Testing Bash script: check-quality-gate-test.bats](https://github.com/SonarSource/sonarqube-quality-gate-action/tree/master/test) diff --git a/docs/posts/2023/2023-11-18-github-actions-deploy-static-files-to-azure-web-app.md b/docs/posts/2023/2023-11-18-github-actions-deploy-static-files-to-azure-web-app.md new file mode 100644 index 00000000..6cdc2180 --- /dev/null +++ b/docs/posts/2023/2023-11-18-github-actions-deploy-static-files-to-azure-web-app.md @@ -0,0 +1,65 @@ +--- +authors: +- copdips +categories: +- cicd +- github +- azure +comments: true +date: + created: 2023-11-18 +description: '' +--- + +# Github actions: deploy static files to azure web app + +Although Azure provides already a [GitHub Actions for Azure Web App](https://learn.microsoft.com/en-us/azure/app-service/deploy-github-actions?tabs=userlevel) to deploy static files to Azure Web App, but we can also do it ourselves with a azure cli command. 
+ +Suppose the static files are generated in a folder named `site`, then the above Azure doc says we can use the following command to deploy it to Azure Web App: + +```yaml +# action actions/checkout should be run before this step +- name: Set Web App runtime + run: | + az webapp config set \ + --resource-group ${{ inputs.mkdocs-azure-resource-group-name }} \ + --name ${{ inputs.mkdocs-azure-app-name }} \ + --linux-fx-version "STATICSITE|1.0" + +- name: Run Azure webapp deploy action using Azure Credentials + uses: azure/webapps-deploy@v2 + with: + app-name: ${{ inputs.mkdocs-azure-app-name }} + package: site +```` + +!!! note + + We manually set the web app runtime to `STATICSITE|1.0` as users might created the web app with other runtime (`STATICSITE|1.0` is not selectable during the standard Web App resource creation except they chose specifically the [Static Web App](https://devblogs.microsoft.com/devops/comparing-azure-static-web-apps-vs-azure-webapps-vs-azure-blob-storage-static-sites/) resource), as we're pushing static files, we should set the runtime to `STATICSITE|1.0`. + +!!! warning + + The [Azure doc](https://learn.microsoft.com/en-us/azure/app-service/deploy-github-actions?tabs=userlevel) uses azure/webapps-deploy@`v2` instead of latest azure/webapps-deploy@`v3`, after some tests, `v3` version has some bug on reboot as post deployment step. See [this issue](https://github.com/Azure/webapps-deploy/issues/379) for more details. + +The above github action could be replaced with: + +```yaml +# action actions/checkout should be run before this step +- name: Run Azure Cli using Azure Credentials + run: | + cd site && zip -r ../site.zip * && cd .. 
+ + az webapp config set \ + --resource-group ${{ inputs.mkdocs-azure-resource-group-name }} \ + --name ${{ inputs.mkdocs-azure-app-name }} \ + --linux-fx-version "STATICSITE|1.0" + + az webapp deployment source config-zip \ + --resource-group ${{ inputs.mkdocs-azure-resource-group-name }} \ + --name ${{ inputs.mkdocs-azure-app-name }} \ + --src site.zip +``` + +!!! note + + When using `az webapp deploy --src-path site.zip --type zip` instead of `az webapp deployment source config-zip`, we get the exact same reboot error as azure/webapps-deploy@`v3`. However `v3` action updates the web app with the new version despite the reboot error, but `az webapp deploy` does not update the web app with the new version. diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css new file mode 100644 index 00000000..8b29d901 --- /dev/null +++ b/docs/stylesheets/extra.css @@ -0,0 +1,82 @@ +/* .md-footer { + --md-footer-bg-color: #1368ca; +} */ + +/* inline code */ +.md-typeset p > code { + background-color: #c9c6c6; + color: #0d0d0d; +} + +/* https://blog.ktz.me/making-mkdocs-tables-look-like-github-markdown-tables/ */ + +th, td { + border: 1px solid var(--md-typeset-table-color); + border-spacing: 0; + border-bottom: none; + border-left: none; + border-top: none; +} + +.md-typeset__table { + line-height: 1; +} + +.md-typeset__table table:not([class]) { + font-size: .74rem; + border-right: none; +} + +.md-typeset__table table:not([class]) td, +.md-typeset__table table:not([class]) th { + padding: 9px; +} + +thead > tr { + background-color: #c9c6c6; +} +[data-md-color-scheme="slate"] thead > tr { + background-color: hsla(var(--md-hue),25%,25%,1) +} + +/* light mode alternating table bg colors */ +.md-typeset__table tr:nth-child(2n) { + background-color: #f8f8f8; +} + +[data-md-color-scheme="slate"] .md-typeset__table tr:nth-child(2n) { + background-color: hsla(var(--md-hue),25%,25%,1) +} + +/* https://github.com/squidfunk/mkdocs-material/issues/5061 */ +.md-typeset 
table:not([class]) tbody tr:hover { + background-color: #dfdbd6; + color: #0d0d0d; +} + +h1, h2 { + font-weight: bold !important; +} + +/* header menu */ +.md-tabs__item--active{ + font-weight: bold; +} + +/* navigation */ +.md-nav__link--active { + font-weight: bold; + border-radius: 7px; + /* background-color: #eee; */ +} + +/* a.md-nav__link { + padding: 1px 2px; +} +*/ + +a.md-nav__link:hover { + background-color: #dfe7f5; + border-radius: 7px; + padding: 1px 2px; +} diff --git a/draft/migrating_from_jekyll_to_mkdocs_material.md b/draft/migrating_from_jekyll_to_mkdocs_material.md new file mode 100644 index 00000000..b63bbc1d --- /dev/null +++ b/draft/migrating_from_jekyll_to_mkdocs_material.md @@ -0,0 +1,15 @@ +# Migrating from Jekyll to Mkdocs-Material + +## Post URL + +Mkdocs-Material compute post url slug by keeping hyphen `-`, while Jekyll discards it. So given title `Github - Test`. Jekyll will generate `github-test`, while Mkdocs-Material will generate `github---test`. + +VSCode replace: + +Source: `^(#[^#].*?) 
- (.*?)` +Replace: `$1: $2` +Files to include: `./docs/posts` + +## Image path + +Use VSCode replace diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 00000000..32d93e01 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,166 @@ +--- +repo_name: copdips/copdips.github.io +repo_url: https://github.com/copdips/copdips.github.io +site_author: Xiang ZHU +site_name: A code to remember +site_url: https://copdips.com +use_directory_urls: false + +# Additional configuration +extra: + analytics: + property: G-L9KPHRQNQN + provider: google + social: + - icon: fontawesome/brands/github + link: https://github.com/copdips + - icon: fontawesome/brands/linkedin + link: https://www.linkedin.com/in/xiang-zhu-13769311/ +extra_css: + - stylesheets/extra.css +extra_javascript: + - https://unpkg.com/tablesort@5.3.0/dist/tablesort.min.js + - javascripts/tablesort.js + +markdown_extensions: + - admonition + - attr_list + - md_in_html + - pymdownx.caret + - pymdownx.details + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.inlinehilite + - pymdownx.magiclink + - pymdownx.mark + - pymdownx.snippets + - pymdownx.superfences: + custom_fences: + - class: mermaid + name: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format "" + - pymdownx.tilde + - pymdownx.tabbed: + alternate_style: true + - tables + - toc: + permalink: '#' + +plugins: + # - meta # ! insider only, and must put before blog plugin + - blog: + blog_dir: . 
+ post_url_date_format: yyyy/MM + categories_allowed: + - api + - ast + - async + - auth + - azure + - backup + - cache + - certificate + - cicd + - container + - databricks + - datetime + - docker + - elastic + - encoding + - file + - flask + - git + - github + - gitlab + - jekyll + - linter + - linux + - markdown + - migration + - multithreading + - network + - package + - pandas + - pip + - powershell + - proxy + - python + - redis + - scheduling + - shell + - sonar + - spark + - sqlalchemy + - storage + - ubuntu + - unittest + - vault + - vmware + - vscode + - web + - rss: + categories: + - categories + - tags + date_from_meta: + as_creation: date + match_path: blog/posts/.* + - search: + separator: '[\s\u200b\-_,:!=\[\]()"`/]+|\.(?!\d)|&[lg]t;|(?!\b)(?=[A-Z][a-z])' + - autorefs + - tags + # - git-revision-date-localized: + # enable_creation_date: true + # type: timeago + # fallback_to_build_date: true + - mkdocstrings: + handlers: + python: + paths: [..] + - mkdocs-video + - glightbox + - minify: + minify_html: true + +theme: + custom_dir: docs/overrides + favicon: assets/favicon.ico + features: + - content.action.edit + - content.action.view + - content.code.annotate + - content.code.copy + - header.autohide + - navigation.expand + - navigation.footer + - navigation.indexes + - navigation.sections + - navigation.tabs + - navigation.tabs.sticky + - navigation.top + - navigation.tracking + - search.highlight + - search.share + - search.suggest + - toc.follow + font: + code: Roboto Mono + text: Roboto + icon: + logo: logo + logo: assets/favicon.ico + logo_bottom: assets/favicon.ico + name: material + palette: + - accent: indigo + primary: indigo + scheme: default + toggle: + icon: material/weather-night + name: Switch to dark mode + - accent: indigo + primary: black + scheme: slate + toggle: + icon: material/brightness-7 + name: Switch to light mode diff --git a/package-lock.json b/package-lock.json deleted file mode 100644 index 761910a1..00000000 --- 
a/package-lock.json +++ /dev/null @@ -1,1534 +0,0 @@ -{ - "name": "minimal-mistakes", - "version": "4.24.0", - "lockfileVersion": 2, - "requires": true, - "packages": { - "": { - "name": "minimal-mistakes", - "version": "4.24.0", - "license": "MIT", - "devDependencies": { - "npm-run-all": "^4.1.5", - "onchange": "^7.1.0", - "uglify-js": "^3.13.6" - }, - "engines": { - "node": ">= 0.10.0" - } - }, - "node_modules/@blakeembrey/deque": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@blakeembrey/deque/-/deque-1.0.5.tgz", - "integrity": "sha512-6xnwtvp9DY1EINIKdTfvfeAtCYw4OqBZJhtiqkT3ivjnEfa25VQ3TsKvaFfKm8MyGIEfE95qLe+bNEt3nB0Ylg==", - "dev": true - }, - "node_modules/@blakeembrey/template": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@blakeembrey/template/-/template-1.0.0.tgz", - "integrity": "sha512-J6WGZqCLdRMHUkyRG6fBSIFJ0rL60/nsQNh5rQvsYZ5u0PsKw6XQcJcA3DWvd9cN3j/IQx5yB1fexhCafwwUUw==", - "dev": true - }, - "node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/anymatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", - "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", - "dev": true, - "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/arg": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", - "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", - "dev": true - }, - "node_modules/balanced-match": { - "version": "1.0.0", - 
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true - }, - "node_modules/binary-extensions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", - "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/chokidar": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz", - "integrity": "sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==", - "dev": true, - "dependencies": { - "anymatch": "~3.1.1", - "braces": "~3.0.2", - "glob-parent": "~5.1.0", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": 
"~3.0.0", - "readdirp": "~3.5.0" - }, - "engines": { - "node": ">= 8.10.0" - }, - "optionalDependencies": { - "fsevents": "~2.3.1" - } - }, - "node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", - "dev": true - }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true - }, - "node_modules/cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "dependencies": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - }, - "engines": { - "node": ">=4.8" - } - }, - "node_modules/define-properties": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", - "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", - "dev": true, - "dependencies": { - "object-keys": "^1.0.12" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "dev": true, - "dependencies": { - "is-arrayish": "^0.2.1" - } - 
}, - "node_modules/es-abstract": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.13.0.tgz", - "integrity": "sha512-vDZfg/ykNxQVwup/8E1BZhVzFfBxs9NqMzGcvIJrqg5k2/5Za2bWo40dK2J1pgLngZ7c+Shh8lwYtLGyrwPutg==", - "dev": true, - "dependencies": { - "es-to-primitive": "^1.2.0", - "function-bind": "^1.1.1", - "has": "^1.0.3", - "is-callable": "^1.1.4", - "is-regex": "^1.0.4", - "object-keys": "^1.0.12" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-to-primitive": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.0.tgz", - "integrity": "sha512-qZryBOJjV//LaxLTV6UC//WewneB3LcXOL9NP++ozKVXsIIIpm/2c13UDiD9Jp2eThsecw9m3jPqDwTyobcdbg==", - "dev": true, - "dependencies": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "dev": true, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "dev": true, - "hasInstallScript": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.1", - 
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true - }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.2.tgz", - "integrity": "sha512-IItsdsea19BoLC7ELy13q1iJFNmd7ofZH5+X/pJr90/nRoPEX0DJo1dHDbgtYWOhJhcCgMDTOw84RZ72q6lB+Q==", - "dev": true - }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/has-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.0.tgz", - "integrity": "sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q=", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/hosted-git-info": { - "version": "2.8.9", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", - "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", - "dev": true - }, - "node_modules/ignore": { - "version": "5.1.8", - "resolved": 
"https://registry.npmjs.org/ignore/-/ignore-5.1.8.tgz", - "integrity": "sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==", - "dev": true, - "engines": { - "node": ">= 4" - } - }, - "node_modules/is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", - "dev": true - }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "dev": true, - "dependencies": { - "binary-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-callable": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz", - "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/is-date-object": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz", - "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", - "dev": true, - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-number": { - "version": "7.0.0", 
- "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-regex": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz", - "integrity": "sha1-VRdIm1RwkbCTDglWVM7SXul+lJE=", - "dev": true, - "dependencies": { - "has": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/is-symbol": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.2.tgz", - "integrity": "sha512-HS8bZ9ox60yCJLH9snBpIwv9pYUAkcuLhSA1oero1UB5y9aiQpRA8y2ex945AOtCZL1lJDeIk3G5LthswI46Lw==", - "dev": true, - "dependencies": { - "has-symbols": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true - }, - "node_modules/json-parse-better-errors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", - "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", - "dev": true - }, - "node_modules/load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/memorystream": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz", - "integrity": "sha1-htcJCzDORV1j+64S3aUaR93K+bI=", - "dev": true, - "engines": { - "node": ">= 0.10.0" - } - }, - 
"node_modules/minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "dev": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true - }, - "node_modules/normalize-package-data": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "dev": true, - "dependencies": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/npm-run-all": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/npm-run-all/-/npm-run-all-4.1.5.tgz", - "integrity": "sha512-Oo82gJDAVcaMdi3nuoKFavkIHBRVqQ1qvMb+9LHk/cF4P6B2m8aP04hGf7oL6wZ9BuGwX1onlLhpuoofSyoQDQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^3.2.1", - "chalk": "^2.4.1", - "cross-spawn": "^6.0.5", - "memorystream": "^0.3.1", - "minimatch": "^3.0.4", - "pidtree": "^0.3.0", - "read-pkg": "^3.0.0", - "shell-quote": "^1.6.1", - "string.prototype.padend": "^3.0.0" - }, - "bin": { - "npm-run-all": "bin/npm-run-all/index.js", - "run-p": "bin/run-p/index.js", - 
"run-s": "bin/run-s/index.js" - }, - "engines": { - "node": ">= 4" - } - }, - "node_modules/object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/onchange": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/onchange/-/onchange-7.1.0.tgz", - "integrity": "sha512-ZJcqsPiWUAUpvmnJri5TPBooqJOPmC0ttN65juhN15Q8xA+Nbg3BaxBHXQ45EistKKlKElb0edmbPWnKSBkvMg==", - "dev": true, - "dependencies": { - "@blakeembrey/deque": "^1.0.5", - "@blakeembrey/template": "^1.0.0", - "arg": "^4.1.3", - "chokidar": "^3.3.1", - "cross-spawn": "^7.0.1", - "ignore": "^5.1.4", - "tree-kill": "^1.2.2" - }, - "bin": { - "onchange": "dist/bin.js" - } - }, - "node_modules/onchange/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/onchange/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/onchange/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - 
"engines": { - "node": ">=8" - } - }, - "node_modules/onchange/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/onchange/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", - "dev": true, - "dependencies": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true - }, - "node_modules/path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "dev": true, - "dependencies": { - "pify": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/picomatch": { - "version": "2.2.3", - "resolved": 
"https://registry.npmjs.org/picomatch/-/picomatch-2.2.3.tgz", - "integrity": "sha512-KpELjfwcCDUb9PeigTs2mBJzXUPzAuP2oPcA989He8Rte0+YUAjw1JVedDhuTKPkHjSYzMN3npC9luThGYEKdg==", - "dev": true, - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/pidtree": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/pidtree/-/pidtree-0.3.0.tgz", - "integrity": "sha512-9CT4NFlDcosssyg8KVFltgokyKZIFjoBxw8CTGy+5F38Y1eQWrt8tRayiUOXE+zVKQnYu5BR8JjCtvK3BcnBhg==", - "dev": true, - "bin": { - "pidtree": "bin/pidtree.js" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", - "dev": true, - "dependencies": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/readdirp": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz", - "integrity": "sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==", - "dev": true, - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/resolve": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.12.0.tgz", - "integrity": "sha512-B/dOmuoAik5bKcD6s6nXDCjzUKnaDvdkRyAk6rsmsKLipWj4797iothd7jmmUhWTfinVMU+wc56rYKsit2Qy4w==", - "dev": true, - "dependencies": { - "path-parse": "^1.0.6" - } - }, - "node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - 
"integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "dev": true, - "dependencies": { - "shebang-regex": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/shell-quote": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.1.tgz", - "integrity": "sha512-2kUqeAGnMAu6YrTPX4E3LfxacH9gKljzVjlkUeSqY0soGwK4KLl7TURXCem712tkhBCeeaFP9QK4dKn88s3Icg==", - "dev": true - }, - "node_modules/spdx-correct": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", - "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", - "dev": true, - "dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-exceptions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", - "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==", - "dev": true - }, - "node_modules/spdx-expression-parse": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", - "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", - "dev": true, - "dependencies": { - "spdx-exceptions": "^2.1.0", - 
"spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", - "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", - "dev": true - }, - "node_modules/string.prototype.padend": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/string.prototype.padend/-/string.prototype.padend-3.0.0.tgz", - "integrity": "sha1-86rvfBcZ8XDF6rHDK/eA2W4h8vA=", - "dev": true, - "dependencies": { - "define-properties": "^1.1.2", - "es-abstract": "^1.4.3", - "function-bind": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/tree-kill": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz", - "integrity": "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==", - "dev": true, - "bin": { - "tree-kill": "cli.js" - } - }, - "node_modules/uglify-js": 
{ - "version": "3.13.6", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.13.6.tgz", - "integrity": "sha512-rRprLwl8RVaS+Qvx3Wh5hPfPBn9++G6xkGlUupya0s5aDmNjI7z3lnRLB3u7sN4OmbB0pWgzhM9BEJyiWAwtAA==", - "dev": true, - "bin": { - "uglifyjs": "bin/uglifyjs" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "dev": true, - "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "which": "bin/which" - } - } - }, - "dependencies": { - "@blakeembrey/deque": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@blakeembrey/deque/-/deque-1.0.5.tgz", - "integrity": "sha512-6xnwtvp9DY1EINIKdTfvfeAtCYw4OqBZJhtiqkT3ivjnEfa25VQ3TsKvaFfKm8MyGIEfE95qLe+bNEt3nB0Ylg==", - "dev": true - }, - "@blakeembrey/template": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@blakeembrey/template/-/template-1.0.0.tgz", - "integrity": "sha512-J6WGZqCLdRMHUkyRG6fBSIFJ0rL60/nsQNh5rQvsYZ5u0PsKw6XQcJcA3DWvd9cN3j/IQx5yB1fexhCafwwUUw==", - "dev": true - }, - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "requires": { - "color-convert": "^1.9.0" - } - }, - "anymatch": { - "version": "3.1.2", - "resolved": 
"https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", - "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", - "dev": true, - "requires": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - } - }, - "arg": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", - "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", - "dev": true - }, - "balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true - }, - "binary-extensions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", - "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", - "dev": true - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "chokidar": { - "version": "3.5.1", - "resolved": 
"https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz", - "integrity": "sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==", - "dev": true, - "requires": { - "anymatch": "~3.1.1", - "braces": "~3.0.2", - "fsevents": "~2.3.1", - "glob-parent": "~5.1.0", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.5.0" - } - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", - "dev": true - }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true - }, - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "define-properties": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", - "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", - "dev": true, - "requires": { - "object-keys": "^1.0.12" - } - }, - "error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": 
"sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "dev": true, - "requires": { - "is-arrayish": "^0.2.1" - } - }, - "es-abstract": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.13.0.tgz", - "integrity": "sha512-vDZfg/ykNxQVwup/8E1BZhVzFfBxs9NqMzGcvIJrqg5k2/5Za2bWo40dK2J1pgLngZ7c+Shh8lwYtLGyrwPutg==", - "dev": true, - "requires": { - "es-to-primitive": "^1.2.0", - "function-bind": "^1.1.1", - "has": "^1.0.3", - "is-callable": "^1.1.4", - "is-regex": "^1.0.4", - "object-keys": "^1.0.12" - } - }, - "es-to-primitive": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.0.tgz", - "integrity": "sha512-qZryBOJjV//LaxLTV6UC//WewneB3LcXOL9NP++ozKVXsIIIpm/2c13UDiD9Jp2eThsecw9m3jPqDwTyobcdbg==", - "dev": true, - "requires": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - } - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "dev": true - }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, - "fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "dev": true, - "optional": true - }, - "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - 
"dev": true - }, - "glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "requires": { - "is-glob": "^4.0.1" - } - }, - "graceful-fs": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.2.tgz", - "integrity": "sha512-IItsdsea19BoLC7ELy13q1iJFNmd7ofZH5+X/pJr90/nRoPEX0DJo1dHDbgtYWOhJhcCgMDTOw84RZ72q6lB+Q==", - "dev": true - }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, - "requires": { - "function-bind": "^1.1.1" - } - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "dev": true - }, - "has-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.0.tgz", - "integrity": "sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q=", - "dev": true - }, - "hosted-git-info": { - "version": "2.8.9", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", - "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", - "dev": true - }, - "ignore": { - "version": "5.1.8", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.8.tgz", - "integrity": "sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==", - "dev": true - }, - "is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", - "dev": true - }, - "is-binary-path": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "dev": true, - "requires": { - "binary-extensions": "^2.0.0" - } - }, - "is-callable": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz", - "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==", - "dev": true - }, - "is-date-object": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz", - "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=", - "dev": true - }, - "is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", - "dev": true - }, - "is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", - "dev": true, - "requires": { - "is-extglob": "^2.1.1" - } - }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, - "is-regex": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz", - "integrity": "sha1-VRdIm1RwkbCTDglWVM7SXul+lJE=", - "dev": true, - "requires": { - "has": "^1.0.1" - } - }, - "is-symbol": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.2.tgz", - "integrity": "sha512-HS8bZ9ox60yCJLH9snBpIwv9pYUAkcuLhSA1oero1UB5y9aiQpRA8y2ex945AOtCZL1lJDeIk3G5LthswI46Lw==", - "dev": true, - "requires": { - "has-symbols": "^1.0.0" - } - }, - "isexe": { - "version": "2.0.0", - 
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true - }, - "json-parse-better-errors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", - "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", - "dev": true - }, - "load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - } - }, - "memorystream": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz", - "integrity": "sha1-htcJCzDORV1j+64S3aUaR93K+bI=", - "dev": true - }, - "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "dev": true, - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true - }, - "normalize-package-data": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "dev": true, - "requires": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "normalize-path": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true - }, - "npm-run-all": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/npm-run-all/-/npm-run-all-4.1.5.tgz", - "integrity": "sha512-Oo82gJDAVcaMdi3nuoKFavkIHBRVqQ1qvMb+9LHk/cF4P6B2m8aP04hGf7oL6wZ9BuGwX1onlLhpuoofSyoQDQ==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.1", - "chalk": "^2.4.1", - "cross-spawn": "^6.0.5", - "memorystream": "^0.3.1", - "minimatch": "^3.0.4", - "pidtree": "^0.3.0", - "read-pkg": "^3.0.0", - "shell-quote": "^1.6.1", - "string.prototype.padend": "^3.0.0" - } - }, - "object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "dev": true - }, - "onchange": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/onchange/-/onchange-7.1.0.tgz", - "integrity": "sha512-ZJcqsPiWUAUpvmnJri5TPBooqJOPmC0ttN65juhN15Q8xA+Nbg3BaxBHXQ45EistKKlKElb0edmbPWnKSBkvMg==", - "dev": true, - "requires": { - "@blakeembrey/deque": "^1.0.5", - "@blakeembrey/template": "^1.0.0", - "arg": "^4.1.3", - "chokidar": "^3.3.1", - "cross-spawn": "^7.0.1", - "ignore": "^5.1.4", - "tree-kill": "^1.2.2" - }, - "dependencies": { - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": 
"sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true - }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - } - } - }, - "parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", - "dev": true, - "requires": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" - } - }, - "path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", - "dev": true - }, - "path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true - }, - "path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "dev": true, - "requires": { - "pify": "^3.0.0" - } - }, - "picomatch": { - 
"version": "2.2.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.3.tgz", - "integrity": "sha512-KpELjfwcCDUb9PeigTs2mBJzXUPzAuP2oPcA989He8Rte0+YUAjw1JVedDhuTKPkHjSYzMN3npC9luThGYEKdg==", - "dev": true - }, - "pidtree": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/pidtree/-/pidtree-0.3.0.tgz", - "integrity": "sha512-9CT4NFlDcosssyg8KVFltgokyKZIFjoBxw8CTGy+5F38Y1eQWrt8tRayiUOXE+zVKQnYu5BR8JjCtvK3BcnBhg==", - "dev": true - }, - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "dev": true - }, - "read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", - "dev": true, - "requires": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - } - }, - "readdirp": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz", - "integrity": "sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==", - "dev": true, - "requires": { - "picomatch": "^2.2.1" - } - }, - "resolve": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.12.0.tgz", - "integrity": "sha512-B/dOmuoAik5bKcD6s6nXDCjzUKnaDvdkRyAk6rsmsKLipWj4797iothd7jmmUhWTfinVMU+wc56rYKsit2Qy4w==", - "dev": true, - "requires": { - "path-parse": "^1.0.6" - } - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - }, - "shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "dev": true, - "requires": { - "shebang-regex": 
"^1.0.0" - } - }, - "shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true - }, - "shell-quote": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.1.tgz", - "integrity": "sha512-2kUqeAGnMAu6YrTPX4E3LfxacH9gKljzVjlkUeSqY0soGwK4KLl7TURXCem712tkhBCeeaFP9QK4dKn88s3Icg==", - "dev": true - }, - "spdx-correct": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", - "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", - "dev": true, - "requires": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "spdx-exceptions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", - "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==", - "dev": true - }, - "spdx-expression-parse": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", - "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", - "dev": true, - "requires": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "spdx-license-ids": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", - "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", - "dev": true - }, - "string.prototype.padend": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/string.prototype.padend/-/string.prototype.padend-3.0.0.tgz", - "integrity": "sha1-86rvfBcZ8XDF6rHDK/eA2W4h8vA=", - "dev": true, - "requires": { - "define-properties": "^1.1.2", - 
"es-abstract": "^1.4.3", - "function-bind": "^1.0.2" - } - }, - "strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", - "dev": true - }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "requires": { - "has-flag": "^3.0.0" - } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "requires": { - "is-number": "^7.0.0" - } - }, - "tree-kill": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz", - "integrity": "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==", - "dev": true - }, - "uglify-js": { - "version": "3.13.6", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.13.6.tgz", - "integrity": "sha512-rRprLwl8RVaS+Qvx3Wh5hPfPBn9++G6xkGlUupya0s5aDmNjI7z3lnRLB3u7sN4OmbB0pWgzhM9BEJyiWAwtAA==", - "dev": true - }, - "validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "dev": true, - "requires": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "requires": { 
- "isexe": "^2.0.0" - } - } - } -} diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..48757737 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,12 @@ +mkdocs +mkdocs-material +mkdocs-material-extensions +mkdocstrings +mkdocstrings-python +mkdocs-git-revision-date-localized-plugin +mkdocs-video +mkdocs-glightbox +mkdocs-redirects +mkdocs-rss-plugin +mkdocs-minify-plugin +mkdocs-material[imaging] diff --git a/scripts/fix_head.py b/scripts/fix_head.py new file mode 100644 index 00000000..5fd6bf26 --- /dev/null +++ b/scripts/fix_head.py @@ -0,0 +1,74 @@ +import os + + +def adjust_headers(markdown_file): + with open(markdown_file, "r", encoding="utf-8") as file: + lines = file.readlines() + + adjusted_lines = [] + top_header_found = False + next_header_found = False + excerpt_processing = False + in_code_block = False # Flag to track if inside a code block + + for i, line in enumerate(lines): + # Toggle in_code_block flag when encountering code block delimiters (```) + if line.strip().startswith("```"): + in_code_block = not in_code_block + + if not in_code_block and line.startswith("# "): + if not top_header_found: + # First top-level header, keep it as is + top_header_found = True + elif not next_header_found: + # Next header found, stop processing excerpt + next_header_found = True + if excerpt_processing: + # Insert with one blank line before and after + adjusted_lines.append("\n\n") + excerpt_processing = False + + if next_header_found and not line.strip() == "": + # Increment header level for all headers after the first top-level header + line = "#" + line + + if top_header_found and not next_header_found and not in_code_block: + if line.startswith("> "): + # Process excerpt lines + excerpt_processing = True + adjusted_lines.append(line[2:]) # Remove '> ' from the line + continue + elif excerpt_processing and not line.strip() == "": + # End of excerpt, add with one blank line before it + adjusted_lines.append("\n\n\n") + 
import os


def remove_extra_blank_lines(markdown_file):
    """Collapse runs of blank lines outside fenced code blocks.

    Rewrites *markdown_file* in place so that, outside ``` fences, at most
    one consecutive blank line remains; fenced content is kept verbatim.
    """
    with open(markdown_file, "r", encoding="utf-8") as src:
        original = src.readlines()

    kept = []
    inside_fence = False
    previous_was_blank = False

    for text in original:
        stripped = text.strip()
        # A ``` delimiter opens or closes a fenced block.
        if stripped.startswith("```"):
            inside_fence = not inside_fence

        if inside_fence:
            # Never touch lines inside a code fence.
            kept.append(text)
            continue

        is_blank = stripped == ""
        if is_blank and previous_was_blank:
            # Drop the surplus blank line.
            continue
        previous_was_blank = is_blank
        kept.append(text)

    # Persist the compacted content back to the same file.
    with open(markdown_file, "w", encoding="utf-8") as dst:
        dst.writelines(kept)


def main():
    """De-duplicate blank lines in every Markdown post under ./docs/posts/."""
    for folder, _subdirs, filenames in os.walk("./docs/posts/"):
        for name in filenames:
            if name.endswith(".md"):
                remove_extra_blank_lines(os.path.join(folder, name))


if __name__ == "__main__":
    main()
import os
import re
import pdb


def convert_admonitions(markdown_file):
    """Convert Jekyll ``{: .notice--info|warning}`` admonitions in one
    Markdown file to MkDocs ``!!! note`` / ``!!! warning`` blocks, in place.
    """
    with open(markdown_file, "r", encoding="utf-8") as file:
        content = file.read()

    # Pattern: an optionally indented text line followed, at the same
    # indentation, by the Jekyll notice marker.
    # BUG FIX: the original used (\s*), which also matches newlines, so the
    # captured "indentation" could swallow line breaks and the backreference
    # would then require a spurious blank line; restrict to spaces/tabs.
    pattern = re.compile(r"([ \t]*)(.+?)\n\1\{: \.notice--(info|warning)\}")

    def replace_admonition(match):
        """Render one matched Jekyll notice as an MkDocs admonition."""
        leading_spaces = match.group(1)
        text = match.group(2).strip()
        admonition_type = "note" if match.group(3) == "info" else "warning"
        # BUG FIX: MkDocs requires the admonition body to be indented by
        # 4 spaces relative to the `!!!` marker; a smaller indent renders
        # the text outside the admonition box.
        return (
            f"{leading_spaces}!!! {admonition_type}\n\n"
            f"{leading_spaces}    {text}\n\n"
        )

    new_content = pattern.sub(replace_admonition, content)

    # Write the converted content back to the same file.
    with open(markdown_file, "w", encoding="utf-8") as file:
        file.write(new_content)


def main():
    """Convert admonitions in every Markdown post under ./docs/posts."""
    try:
        for root, dirs, files in os.walk("./docs/posts"):
            for filename in files:
                if filename.endswith(".md"):
                    convert_admonitions(os.path.join(root, filename))
    except Exception as e:
        # Dev convenience: drop into the post-mortem debugger on failure.
        print(f"An exception occurred: {e}")
        pdb.post_mortem()


if __name__ == "__main__":
    main()
= re.search(r"^---\s+(.*?)\s+---", content, re.DOTALL) + if not front_matter: + return None + + # Load YAML data + yaml_data = yaml.safe_load(front_matter.group(1)) + + # Preparing MkDocs YAML header + mkdocs_data = { + "date": { + "created": datetime.strptime( + jekyll_file.split("/")[-1][:10], "%Y-%m-%d" + ).date() + }, + "authors": AUTHORS, + "comments": True, + "description": yaml_data.get("excerpt", ""), + "categories": yaml_data.get("tags", []), + } + + # Add 'updated' date if 'last_modified_at' is present + # and is a datetime object + if "last_modified_at" in yaml_data and isinstance( + yaml_data["last_modified_at"], datetime + ): + mkdocs_data["date"]["updated"] = yaml_data["last_modified_at"].date() + + # Handle draft status + if not yaml_data.get("published", True): + mkdocs_data["draft"] = True + + # Convert to YAML string + mkdocs_yaml = yaml.safe_dump(mkdocs_data, default_flow_style=False).strip() + + # Combine new YAML with the original content without old YAML and + # add Jekyll title as top-level header + new_content = ( + "---\n" + + mkdocs_yaml + + "\n---\n\n# " + + yaml_data.get("title", "") + + re.sub(r"^---\s+.*?\s+---", "", content, flags=re.DOTALL) + ) + + return new_content + + +def main(): + os.makedirs("./docs/posts", exist_ok=True) + + for root, dirs, files in os.walk("./_posts"): + for filename in files: + if filename.endswith(".md"): + jekyll_file = os.path.join(root, filename) + new_content = convert_jekyll_to_mkdocs(jekyll_file) + + if new_content: + # Fixing the path for new_root + new_root = root.replace("./_posts", "./new_posts") + os.makedirs(new_root, exist_ok=True) + with open( + os.path.join(new_root, filename), "w", encoding="utf-8" + ) as file: + file.write(new_content) + + +if __name__ == "__main__": + try: + main() + except Exception as e: + print(f"An exception occurred: {e}") + pdb.post_mortem()