Skip to content

Commit

Permalink
Merge pull request #28 from superagentxai/dev
Browse files — browse the repository at this point in the history
Dev
  • Loading branch information
RaghavPrabhu authored Nov 27, 2024
2 parents ca2c00e + bac6221 commit 3cfd090
Show file tree
Hide file tree
Showing 15 changed files with 790 additions and 306 deletions.
592 changes: 299 additions & 293 deletions poetry.lock

Large diffs are not rendered by default.

10 changes: 7 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "superagentx-handlers"
version = "0.1.4"
version = "0.1.5"
description = ""
authors = [
"SuperAgentX AI <[email protected]>",
Expand All @@ -24,7 +24,7 @@ openai = "^1.52.0"
google-auth-oauthlib = "^1.2.1"
google-api-python-client = "^2.149.0"
wikipedia-api = "^0.7.1"
crawl4ai = "^0.3.71"
crawl4ai = "0.3.731"
sqlalchemy = {extras = ["asyncio"], version = "^2.0.36"}
psycopg2-binary = "^2.9.10"
oracledb = "^2.4.1"
Expand All @@ -34,13 +34,17 @@ aiomysql = "^0.2.0"
aioodbc = "^0.5.0"
jira = "^3.8.0"
pypdf = "^5.0.1"
tweepy = {extras = ["async"], version = "^4.14.0"}
requests-html = "^0.10.0"
pyshorteners = "^1.0.1"
lxml-html-clean = "^0.4.1"

[tool.poetry.group.test.dependencies]
pytest = "^8.3.3"
pytest-asyncio = "^0.24.0"

[tool.poetry.group.dev.dependencies]
superagentx = "^0.1.6"
superagentx = "^0.1.12"

[tool.pytest.ini_options]
asyncio_mode = "auto"
Expand Down
2 changes: 2 additions & 0 deletions superagentx_handlers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,5 @@
from .sql import SQLHandler
from .weather import WeatherHandler
from .wikipedia import WikipediaHandler
from .twitter import TwitterHandler
from .websitecrawler import *
1 change: 1 addition & 0 deletions superagentx_handlers/ecommerce/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from .amazon import AmazonHandler
from .best_buy import BestbuyHandler
from .fake_amazon import FakeAmazonHandler
from .fake_flipkart import FakeFlipkartHandler
from .flipkart import FlipkartHandler
Expand Down
103 changes: 103 additions & 0 deletions superagentx_handlers/ecommerce/best_buy.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
from urllib.parse import quote

import aiohttp
from superagentx.handler.base import BaseHandler
from superagentx.handler.decorators import tool
from superagentx.utils.helper import iter_to_aiter

# Base endpoint of the Best Buy Products API (v1).
BASE_URL = "https://api.bestbuy.com/v1/products"

# Product attributes requested from the API via the "show" query
# parameter (comma-separated field list).
SHOW_OPTIONS = (
"show=customerReviewAverage,"
"customerReviewCount,"
"dollarSavings,"
"url,"
"image,"
"includedItemList.includedItem,"
"modelNumber,"
"name,"
"onlineAvailability,"
"onSale,"
"percentSavings,"
"regularPrice,"
"salePrice,"
"sku,"
"thumbnailImage"
)
# Request 100 products per page of results.
DEFAULT_PAGINATION = "pageSize=100"
# Ask the API for JSON instead of its default XML response.
RESPONSE_FORMAT = "format=json"


class BestBuyError(Exception):
    """Raised when a Best Buy API request fails or returns an error response."""


class BestbuyHandler(BaseHandler):
    """
    A handler for interacting with the Best Buy API.

    Provides methods to retrieve product information from Best Buy's
    inventory using the products API, with options for customization such
    as search filters, pagination, and response formatting.

    Attributes:
        api_key (str): The API key used for authenticating requests to the
            Best Buy API. Sent as the ``apiKey`` query parameter.
    """

    def __init__(
            self,
            *,
            api_key: str
    ):
        super().__init__()
        self.api_key = api_key

    @tool
    async def get_best_buy_info(
            self,
            search_text: str
    ):
        """
        Fetches product information from the Best Buy API based on the search text.

        Args:
            search_text (str): The keyword or query string to search for products.
                Percent-encoded before being placed in the URL. If empty, no
                search filter is applied.

        Returns:
            list[dict]: One dict per matching product with ``title``, ``link``,
                ``saleprice``, ``oldprice`` and ``reviews`` keys.

        Raises:
            BestBuyError: If the request fails, the API returns a non-200
                status, or the response contains no products.
        """
        # Best Buy's URL syntax places the search filter in double parentheses
        # directly after the collection path, e.g. /v1/products((search=tv)).
        # Encode the user-supplied text so spaces/&/# cannot break the query.
        search_keyword = f"((search={quote(search_text)}))" if search_text else ""

        url = (
            f"{BASE_URL}"
            f"{search_keyword}?"
            f"{SHOW_OPTIONS}"
            f"&{RESPONSE_FORMAT}"
            f"&{DEFAULT_PAGINATION}"
            f"&apiKey={self.api_key}"
        )
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(url=url) as resp:
                    if resp.status == 200:
                        data = await resp.json()
                        # .get() avoids a bare KeyError on a 200 response
                        # that lacks a "products" key.
                        products = data.get('products')
                        if products:
                            return [
                                {
                                    'title': item.get('name'),
                                    'link': item.get('url'),
                                    'saleprice': item.get('salePrice'),
                                    'oldprice': item.get('regularPrice'),
                                    'reviews': item.get('customerReviewCount')
                                }
                                async for item in iter_to_aiter(products)
                            ]
                    # Non-200 status, or no products: surface the raw response
                    # body to aid debugging.
                    raise BestBuyError(await resp.text())
        except BestBuyError:
            # Already our domain error - don't double-wrap it.
            raise
        except Exception as ex:
            # Wrap transport/parsing failures, preserving the original cause.
            raise BestBuyError(str(ex)) from ex
3 changes: 2 additions & 1 deletion superagentx_handlers/scrape.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,8 @@ async def scrap_content(
"""
async with AsyncWebCrawler(verbose=True) as crawler:
results = await crawler.arun_many(
urls=domain_urls
urls=domain_urls,
bypass_cache=True
)
if results:
return [
Expand Down
87 changes: 87 additions & 0 deletions superagentx_handlers/twitter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
import logging
import os

import pyshorteners
from superagentx.handler.base import BaseHandler
from superagentx.handler.decorators import tool
from superagentx.utils.helper import sync_to_async
from tweepy.asynchronous import AsyncClient

logger = logging.getLogger(__name__)


class TwitterHandler(BaseHandler):
    """
    A handler for posting tweets via the Twitter API v2 async client.

    Credentials fall back to the CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN
    and ACCESS_TOKEN_SECRET environment variables when not passed explicitly.
    """

    def __init__(
            self,
            *,
            api_key: str | None = None,
            api_secret_key: str | None = None,
            access_token: str | None = None,
            access_token_secret: str | None = None
    ):
        super().__init__()
        # Define client as an instance attribute
        self.client = AsyncClient(
            consumer_key=api_key or os.getenv("CONSUMER_KEY"),
            consumer_secret=api_secret_key or os.getenv("CONSUMER_SECRET"),
            access_token=access_token or os.getenv("ACCESS_TOKEN"),
            access_token_secret=access_token_secret or os.getenv("ACCESS_TOKEN_SECRET")
        )
        # TinyURL shortener with a 5-second timeout, used to keep links short.
        self._tinyurl = pyshorteners.Shortener(timeout=5).tinyurl

    async def _get_shortener_url(self, link: str) -> str:
        """Shorten *link* with TinyURL, running the blocking call off the event loop."""
        return await sync_to_async(
            self._tinyurl.short,
            link
        )

    @tool
    async def post_tweet(
            self,
            text: str,
            link: str | None = None,
            hash_tags: list[str] | None = None,
            user_tags: list[str] | None = None
    ):
        """
        Posts a tweet with optional link, hashtags and user tags.

        Parameters:
        -----------
        text : str
            The main content of the tweet. This is a required parameter.
        link : str, optional
            A valid website link to include in the tweet; it is shortened via
            TinyURL and appended to the text. Defaults to None.
        hash_tags : list[str], optional
            A list of hashtags to include in the tweet. Each hashtag should be
            a string without the `#` symbol. Defaults to None.
        user_tags : list[str], optional
            A list of Twitter usernames (without the `@` symbol) to mention in
            the tweet. Defaults to None.

        Returns:
        --------
        dict
            The response data from the created tweet (ID, text, meta, etc.).

        Raises:
        -------
        ValueError
            If *text* is empty.
        """
        if not text:
            logger.error("Tweet text cannot be empty.")
            raise ValueError("Tweet text cannot be empty.")

        join_hashtags = " ".join(f"#{x}" for x in hash_tags or [])
        join_user_tags = " ".join(f"@{x}" for x in user_tags or [])

        # Final layout: "#tags @users text short_link"
        if link:
            text = f'{text} {await self._get_shortener_url(link)}'
        if join_user_tags:
            text = f'{join_user_tags} {text}'
        if join_hashtags:
            text = f'{join_hashtags} {text}'

        logger.debug(f'Tweet Text Length {len(text)} and Text => \n\t{text}')
        response = await self.client.create_tweet(text=text)
        return response.data
1 change: 1 addition & 0 deletions superagentx_handlers/websitecrawler/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
from .amazon import AmazonWebHandler
Loading

0 comments on commit 3cfd090

Please sign in to comment.