Skip to content

Commit

Permalink
Merge pull request #208 from carlmontanari/develop
Browse files Browse the repository at this point in the history
Prepare 2022.01.30 Release
  • Loading branch information
carlmontanari authored Jan 29, 2022
2 parents 7af5387 + 6e909b4 commit 89589ee
Show file tree
Hide file tree
Showing 124 changed files with 3,294 additions and 606 deletions.
25 changes: 18 additions & 7 deletions .github/workflows/commit.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ jobs:
max-parallel: 1
matrix:
os: [ubuntu-latest]
version: [3.9]
version: ["3.10"]
steps:
- uses: actions/checkout@v2
- name: set up python ${{ matrix.version }}
Expand All @@ -30,7 +30,7 @@ jobs:
max-parallel: 1
matrix:
os: [ubuntu-latest]
version: [3.9]
version: ["3.10"]
steps:
- uses: actions/checkout@v2
- name: set up python ${{ matrix.version }}
Expand All @@ -52,10 +52,10 @@ jobs:
build_posix:
runs-on: ${{ matrix.os }}
strategy:
max-parallel: 12
max-parallel: 8
matrix:
os: [ubuntu-latest, macos-latest]
version: [3.6, 3.7, 3.8, 3.9, 3.10.0-rc.2]
version: ["3.7", "3.8", "3.9", "3.10"]
steps:
- uses: actions/checkout@v2
- name: set up python ${{ matrix.version }}
Expand Down Expand Up @@ -88,7 +88,18 @@ jobs:
steps:
- uses: actions/checkout@v2
- run: docker run -v $(pwd):/docs --entrypoint "" squidfunk/mkdocs-material:latest ash -c 'pip install mdx_gh_links && mkdocs build --clean --strict'
- name: Cache htmltest external links
uses: actions/cache@v2
with:
path: tmp/.htmltest
# key will contain hash of all md files to check if files have changed
# when files are changed, a new key name is formed, and thus a new cache will be saved
key: htmltest-${{ hashFiles('docs/**/*.md') }}
# the restore key will fetch any previously saved cache even if there is no match on key
# this allows reusing the cache from previous runs and updating it
restore-keys: |
htmltest-
- name: htmltest
run: |
curl https://htmltest.wjdp.uk | bash
./bin/htmltest -c docs/htmltest.yml
uses: wjdp/[email protected]
with:
config: docs/htmltest.yml
6 changes: 3 additions & 3 deletions .github/workflows/publish.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: set up python 3.9
- name: set up python 3.10
uses: actions/setup-python@v2
with:
python-version: 3.9
python-version: "3.10"
- name: setup publish env
run: |
python -m pip install --upgrade pip
Expand All @@ -27,7 +27,7 @@ jobs:
python setup.py sdist bdist_wheel
python -m twine upload dist/*
- name: create release branch
uses: peterjgrainger/action-create-branch@v2.0.1
uses: peterjgrainger/action-create-branch@v2.1.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/weekly.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ jobs:
max-parallel: 1
matrix:
os: [ubuntu-latest]
version: [3.9]
version: ["3.10"]
steps:
- uses: actions/checkout@v2
- name: set up python ${{ matrix.version }}
Expand All @@ -31,10 +31,10 @@ jobs:
build_posix:
runs-on: ${{ matrix.os }}
strategy:
max-parallel: 12
max-parallel: 8
matrix:
os: [ubuntu-latest, macos-latest]
version: [3.6, 3.7, 3.8, 3.9, 3.10.0-rc.2]
version: ["3.7", "3.8", "3.9", "3.10"]
steps:
- uses: actions/checkout@v2
- name: set up python ${{ matrix.version }}
Expand Down
8 changes: 4 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ scrapli

---

scrapli -- scrap(e c)li -- is a python 3.6+ library focused on connecting to devices, specifically network devices
scrapli -- scrap(e c)li -- is a python 3.7+ library focused on connecting to devices, specifically network devices
(routers/switches/firewalls/etc.) via Telnet or SSH.

#### Key Features:
Expand All @@ -47,15 +47,15 @@ scrapli -- scrap(e c)li -- is a python 3.6+ library focused on connecting to de

## Requirements

MacOS or \*nix<sup>1</sup>, Python 3.6+
MacOS or \*nix<sup>1</sup>, Python 3.7+

scrapli "core" has no requirements other than the Python standard library<sup>2</sup>.


<sup>1</sup> Although many parts of scrapli *do* run on Windows, Windows is not officially supported

<sup>2</sup> Python 3.6 requires the `dataclass` backport as well as third party `async_generator` library, Python 3.
7+ has no external dependencies for scrapli "core"
<sup>2</sup> While support for Python 3.6 has been dropped, scrapli *probably* still works on it, but requires the
`dataclass` backport as well as the third party `async_generator` library; Python 3.7+ has no external dependencies for scrapli "core"


## Installation
Expand Down
32 changes: 11 additions & 21 deletions docs/api_docs/channel/async_channel.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ scrapli.channel.async_channel
import asyncio
import re
import time
from io import SEEK_END, BytesIO
from io import BytesIO

try:
from contextlib import asynccontextmanager
Expand Down Expand Up @@ -173,8 +173,7 @@ class AsyncChannel(BaseChannel):
b = await self.read()
read_buf.write(b)

read_buf.seek(-self._base_channel_args.comms_prompt_search_depth, SEEK_END)
search_buf = read_buf.read()
search_buf = self._process_read_buf(read_buf=read_buf)

channel_match = re.search(
pattern=search_pattern,
Expand Down Expand Up @@ -215,8 +214,7 @@ class AsyncChannel(BaseChannel):
b = await self.read()
read_buf.write(b)

read_buf.seek(-self._base_channel_args.comms_prompt_search_depth, SEEK_END)
search_buf = read_buf.read()
search_buf = self._process_read_buf(read_buf=read_buf)

for search_pattern in search_patterns:
channel_match = re.search(
Expand Down Expand Up @@ -277,8 +275,7 @@ class AsyncChannel(BaseChannel):
except ScrapliTimeout:
pass

read_buf.seek(-self._base_channel_args.comms_prompt_search_depth, SEEK_END)
search_buf = read_buf.read()
search_buf = self._process_read_buf(read_buf=read_buf)

if (time.time() - start) > read_duration:
break
Expand Down Expand Up @@ -689,11 +686,9 @@ class AsyncChannel(BaseChannel):
)

self.write(channel_input=channel_input, redacted=bool(hidden_input))
if not channel_response or hidden_input is True:
self.send_return()
else:
if channel_response and hidden_input is not True:
buf += await self._read_until_input(channel_input=bytes_channel_input)
self.send_return()
self.send_return()
buf += await self._read_until_explicit_prompt(prompts=prompts)

processed_buf += self._process_output(
Expand Down Expand Up @@ -858,8 +853,7 @@ class AsyncChannel(BaseChannel):
b = await self.read()
read_buf.write(b)

read_buf.seek(-self._base_channel_args.comms_prompt_search_depth, SEEK_END)
search_buf = read_buf.read()
search_buf = self._process_read_buf(read_buf=read_buf)

channel_match = re.search(
pattern=search_pattern,
Expand Down Expand Up @@ -900,8 +894,7 @@ class AsyncChannel(BaseChannel):
b = await self.read()
read_buf.write(b)

read_buf.seek(-self._base_channel_args.comms_prompt_search_depth, SEEK_END)
search_buf = read_buf.read()
search_buf = self._process_read_buf(read_buf=read_buf)

for search_pattern in search_patterns:
channel_match = re.search(
Expand Down Expand Up @@ -962,8 +955,7 @@ class AsyncChannel(BaseChannel):
except ScrapliTimeout:
pass

read_buf.seek(-self._base_channel_args.comms_prompt_search_depth, SEEK_END)
search_buf = read_buf.read()
search_buf = self._process_read_buf(read_buf=read_buf)

if (time.time() - start) > read_duration:
break
Expand Down Expand Up @@ -1374,11 +1366,9 @@ class AsyncChannel(BaseChannel):
)

self.write(channel_input=channel_input, redacted=bool(hidden_input))
if not channel_response or hidden_input is True:
self.send_return()
else:
if channel_response and hidden_input is not True:
buf += await self._read_until_input(channel_input=bytes_channel_input)
self.send_return()
self.send_return()
buf += await self._read_until_explicit_prompt(prompts=prompts)

processed_buf += self._process_output(
Expand Down
74 changes: 67 additions & 7 deletions docs/api_docs/channel/base_channel.md
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ import re
from dataclasses import dataclass
from datetime import datetime
from functools import lru_cache
from io import BytesIO
from io import SEEK_END, BytesIO
from typing import BinaryIO, List, Optional, Pattern, Tuple, Union

from scrapli.exceptions import ScrapliAuthenticationFailed, ScrapliTypeError, ScrapliValueError
Expand Down Expand Up @@ -141,7 +141,7 @@ class BaseChannel:
self.channel_log: Optional[BinaryIO] = None

self._auth_telnet_login_pattern = r"^(.*username:)|(.*login:)\s?$"
self._auth_password_pattern = r"^password:\s?$"
self._auth_password_pattern = r"^(.*@.*)?password:\s?$"
self._auth_passphrase_pattern = r"enter passphrase for key"

@property
Expand Down Expand Up @@ -292,10 +292,11 @@ class BaseChannel:
# if you change the mode --> "wb" or "ab" it works as you would hope/expect; those
# are the only values it can possibly be at this point though so we can safely
# ignore here
self.channel_log = open( # pylint: disable=R1732
# note that this will *always* be binary mode, so there doesn't need to be any
# encoding, hence ignoring that pylint message!
self.channel_log = open( # pylint: disable=W1514,R1732
channel_log_destination,
mode=f"{self._base_channel_args.channel_log_mode}b", # type: ignore
encoding="utf-8",
)

def close(self) -> None:
Expand All @@ -315,6 +316,35 @@ class BaseChannel:
if self.channel_log:
self.channel_log.close()

def _process_read_buf(self, read_buf: BytesIO) -> bytes:
"""
Process the read buffer

Seeks backwards up to search depth then partitions on newlines. Partition is to ensure that
the resulting search_buf does not end up with partial lines in the output which can cause
prompt patterns to match places they should not match!

Args:
read_buf: bytesio object read from the transport

Returns:
bytes: cleaned up search buffer

Raises:
N/A

"""
read_buf.seek(-self._base_channel_args.comms_prompt_search_depth, SEEK_END)
search_buf = read_buf.read()

before, _, search_buf = search_buf.partition(b"\n")

if not search_buf:
# didn't split on anything or nothing after partition
search_buf = before

return search_buf

def write(self, channel_input: str, redacted: bool = False) -> None:
"""
Write input to the underlying Transport session
Expand Down Expand Up @@ -665,7 +695,7 @@ class BaseChannel:
self.channel_log: Optional[BinaryIO] = None

self._auth_telnet_login_pattern = r"^(.*username:)|(.*login:)\s?$"
self._auth_password_pattern = r"^password:\s?$"
self._auth_password_pattern = r"^(.*@.*)?password:\s?$"
self._auth_passphrase_pattern = r"enter passphrase for key"

@property
Expand Down Expand Up @@ -816,10 +846,11 @@ class BaseChannel:
# if you change the mode --> "wb" or "ab" it works as you would hope/expect; those
# are the only values it can possibly be at this point though so we can safely
# ignore here
self.channel_log = open( # pylint: disable=R1732
# note that this will *always* be binary mode, so there doesn't need to be any
# encoding, hence ignoring that pylint message!
self.channel_log = open( # pylint: disable=W1514,R1732
channel_log_destination,
mode=f"{self._base_channel_args.channel_log_mode}b", # type: ignore
encoding="utf-8",
)

def close(self) -> None:
Expand All @@ -839,6 +870,35 @@ class BaseChannel:
if self.channel_log:
self.channel_log.close()

def _process_read_buf(self, read_buf: BytesIO) -> bytes:
    """
    Process the read buffer

    Seeks backwards up to search depth then partitions on newlines. Partition is to ensure that
    the resulting search_buf does not end up with partial lines in the output which can cause
    prompt patterns to match places they should not match!

    Args:
        read_buf: bytesio object read from the transport

    Returns:
        bytes: cleaned up search buffer

    Raises:
        N/A

    """
    # BytesIO clamps a negative resulting seek position to 0, so this is safe even when the
    # buffer holds fewer bytes than comms_prompt_search_depth
    read_buf.seek(-self._base_channel_args.comms_prompt_search_depth, SEEK_END)
    search_buf = read_buf.read()

    # drop everything up to and including the first newline -- the seek can land mid-line and
    # a partial first line could falsely match a prompt pattern
    before, _, search_buf = search_buf.partition(b"\n")

    if not search_buf:
        # didn't split on anything or nothing after partition
        search_buf = before

    return search_buf

def write(self, channel_input: str, redacted: bool = False) -> None:
"""
Write input to the underlying Transport session
Expand Down
Loading

0 comments on commit 89589ee

Please sign in to comment.