This repository has been archived by the owner on Apr 23, 2024. It is now read-only.

Commit

Change arguments in get_loans function to return group guaranteed loans by default.
thicccat688 committed Jan 9, 2024
1 parent f93f547 commit 12712d7
Showing 2 changed files with 7 additions and 2 deletions.
peerberrypy/api.py: 7 changes (6 additions & 1 deletion)
@@ -156,7 +156,7 @@ def get_loans(
         loan_types: Optional[List[str]] = None,
         sort: str = 'loan_amount',
         ascending_sort: bool = False,
-        group_guarantee: Optional[bool] = None,
+        group_guarantee: bool = True,
         exclude_invested_loans: Optional[bool] = None,
         raw: bool = False,
     ) -> 'Union[pd.DataFrame, List[dict]]':
@@ -179,7 +179,9 @@ def get_loans(
         :param raw: Returns python list if True or pandas DataFrame if False (False by default)
         :return: All available loans for investment according to specified parameters
         """
+
         argv = locals()
+
         if quantity <= 0:
             raise ValueError('You need to fetch at least 1 loan.')
 
@@ -189,6 +191,7 @@
         argv.pop('quantity', None)
         argv.pop('raw', None)
         argv.pop('start_page', None)
+
         do_get_loans_page = functools.partial(self.get_loans_page, **argv)
 
         loans = []
@@ -198,7 +201,9 @@
 
         for page_num in range(total_pages):
             remaining_items = quantity - (page_num * max_page_size)
+
             page_size = min(remaining_items, max_page_size)
+
             loans_data = do_get_loans_page(page_num)['data']
 
             if len(loans_data) == 0:
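The practical effect of the api.py change is that group-guaranteed loans are requested by default. A minimal usage sketch follows, assuming `api` is an already initialised peerberrypy client exposing the get_loans method shown above; the client's construction is not part of this diff, and only the quantity, group_guarantee, and raw parameters used here appear in the changed code.

# Minimal sketch of the new default; `api` is assumed to be an already
# initialised peerberrypy client (construction not shown in this commit).

# group_guarantee now defaults to True, so group-guaranteed loans are
# requested without any extra argument:
loans = api.get_loans(quantity=100)

# The new default can still be overridden explicitly:
other_loans = api.get_loans(quantity=100, group_guarantee=False)

# Per the docstring above, raw=True returns a plain Python list instead of
# a pandas DataFrame:
raw_loans = api.get_loans(quantity=100, raw=True)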
pyproject.toml: 2 changes (1 addition & 1 deletion)
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "peerberrypy"
-version = "1.7.0"
+version = "1.9.0"
 authors = [
     { name = "Tomás Perestrelo", email = "[email protected]" }
 ]
