Skip to content

Commit

Permalink
Merge branch 'feature-#18' into develop
Browse files Browse the repository at this point in the history
  • Loading branch information
FRYoussef committed Oct 23, 2020
2 parents 2b12d36 + 9c34b0c commit c30e65d
Show file tree
Hide file tree
Showing 72 changed files with 2,864 additions and 96 deletions.
2 changes: 0 additions & 2 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,6 @@ jobs:
pip install flake8
# stop the build if there are Python syntax errors or undefined names
flake8 . --exclude=venv,__pycache__ --count --ignore=E1,E2,E3,E501,W1,W2,W3,W5 --show-source --statistics --max-complexity=10 --max-line-length=100
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
#flake8 . --count --max-complexity=10 --max-line-length=127 --statistics
- name: Test with pytest
run: |
#pip install pytest
Expand Down
24 changes: 22 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
# DAO-Analyzer
It is a tool to visualize DAO metrics. Currently, it shows DAO from [DAOstack](https://daostack.io/), [DAOhaus](https://daohaus.club/).
It is a tool to visualize DAO metrics. Currently, it shows DAOs from [DAOstack](https://daostack.io/), [DAOhaus](https://daohaus.club/), and [Aragon](https://aragon.org/).

## Available metrics
* DAOstack:
* DAO:
* Months in which the DAO has registered activity
* Reputation Holders:
* New reputation holders
* Active reputation holders
Expand All @@ -20,6 +22,8 @@ It is a tool to visualize DAO metrics. Currently, it shows DAO from [DAOstack](h
* Success rate of the stakes by type

* DAOhaus:
* DAO:
* Months in which the DAO has registered activity
* Members:
* New members
* Active members
Expand All @@ -33,8 +37,24 @@ It is a tool to visualize DAO metrics. Currently, it shows DAO from [DAOstack](h
* Proposals outcome
* Proposals type

* Aragon:
* DAO:
* Months in which the DAO has registered activity
* Token Holders:
* Active token holders
* Votes:
* New votes
* Votes' outcome
* Casted votes:
* Casted votes by support
* Active voters
* Transactions:
* New transactions
* Aragon apps:
* Installed apps

## Architecture
There is available a class diagram of the [DAOstack app](https://github.com/Grasia/dao-analyzer/blob/master/src/apps/daostack/class_diagram.png), and the [DAOhaus app](https://github.com/Grasia/dao-analyzer/blob/master/src/apps/daohaus/class_diagram.png).
Class diagrams are available for the [DAOstack app](https://github.com/Grasia/dao-analyzer/blob/master/src/apps/daostack/class_diagram.png), the [DAOhaus app](https://github.com/Grasia/dao-analyzer/blob/master/src/apps/daohaus/class_diagram.png), and the [Aragon app](https://github.com/Grasia/dao-analyzer/blob/master/src/apps/aragon/class_diagram.png).

## Download
Enter in your terminal (git must be installed) and write down:
Expand Down
26 changes: 22 additions & 4 deletions cache_scripts/api_requester.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,20 @@
class ApiRequester:

ELEMS_PER_CHUNK: int = 1000

DAOSTACK: int = 0
DAOHAUS: int = 1
__DAOSTACK_URL: str = 'https://api.thegraph.com/subgraphs/name/daostack/master'
__DAOHAUS_URL: str = 'https://api.thegraph.com/subgraphs/name/odyssy-automaton/daohaus'
ARAGON_MAINNET: int = 2
ARAGON_TOKENS: int = 3
ARAGON_VOTING: int = 4
ARAGON_FINANCE: int = 5

__URL_DAOSTACK: str = 'https://api.thegraph.com/subgraphs/name/daostack/master'
__URL_DAOHAUS: str = 'https://api.thegraph.com/subgraphs/name/odyssy-automaton/daohaus'
__URL_ARAGON_MAINNET: str = 'https://api.thegraph.com/subgraphs/name/aragon/aragon-mainnet'
__URL_ARAGON_TOKENS: str = 'https://api.thegraph.com/subgraphs/name/aragon/aragon-tokens-mainnet'
__URL_ARAGON_VOTING: str = 'https://api.thegraph.com/subgraphs/name/aragon/aragon-voting-mainnet'
__URL_ARAGON_FINANCE: str = 'https://api.thegraph.com/subgraphs/name/aragon/aragon-finance-mainnet'


def __init__(self, endpoint: int) -> None:
Expand All @@ -30,9 +40,17 @@ def __get_endpoint(self, endpoint: int) -> str:
url: str = ''

if endpoint is self.DAOSTACK:
url = self.__DAOSTACK_URL
url = self.__URL_DAOSTACK
elif endpoint is self.DAOHAUS:
url = self.__DAOHAUS_URL
url = self.__URL_DAOHAUS
elif endpoint is self.ARAGON_MAINNET:
url = self.__URL_ARAGON_MAINNET
elif endpoint is self.ARAGON_TOKENS:
url = self.__URL_ARAGON_TOKENS
elif endpoint is self.ARAGON_VOTING:
url = self.__URL_ARAGON_VOTING
elif endpoint is self.ARAGON_FINANCE:
url = self.__URL_ARAGON_FINANCE

return url

Expand Down
Empty file.
Empty file.
65 changes: 65 additions & 0 deletions cache_scripts/aragon/collectors/apps.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
"""
Descp: Script to fetch app's data and store it.
Created on: 15-oct-2020
Copyright 2020-2021 Youssef 'FRYoussef' El Faqir El Rhazoui
<[email protected]>
"""

import os
import pandas as pd
from typing import Dict, List
from datetime import datetime, date

from api_requester import ApiRequester


# GraphQL query template; format args: {0} -> page size (first), {1} -> offset (skip).
# Double braces are literal braces escaped for str.format.
APP_QUERY: str = '{{apps(first: {0}, skip: {1}\
){{id isForwarder isUpgradeable repoName repoAddress organization{{id}} }}}}'

# Result key in the GraphQL response; also the entry name in the metadata dict
# and the basename of the CSV file in the datawarehouse.
META_KEY: str = 'apps'


def _request_apps(current_rows: int) -> List[Dict]:
    """Download every app record beyond the first `current_rows` already cached."""
    requester: ApiRequester = ApiRequester(endpoint=ApiRequester.ARAGON_MAINNET)
    print("Requesting App's data ...")
    start: datetime = datetime.now()

    records: List[Dict] = requester.n_requests(
        query=APP_QUERY, skip_n=current_rows, result_key=META_KEY)

    elapsed: float = round((datetime.now() - start).total_seconds(), 2)
    print(f"App's data requested in {elapsed}s")
    return records


def _transform_to_df(apps: List[Dict]) -> pd.DataFrame:
for app in apps:
org: str = app['organization']['id']
del app['organization']
app['organizationId'] = org

return pd.DataFrame(apps)


def update_apps(meta_data: Dict) -> None:
    """Fetch new apps, append them to the CSV warehouse, and advance meta_data.

    meta_data[META_KEY]['rows'] tracks how many rows are already stored; it is
    updated in place together with 'lastUpdate' (today's ISO date).
    """
    apps: List[Dict] = _request_apps(current_rows=meta_data[META_KEY]['rows'])
    df: pd.DataFrame = _transform_to_df(apps=apps)

    filename: str = os.path.join('datawarehouse', 'aragon', f'{META_KEY}.csv')

    # Append without header if the file already exists, else create it with one.
    if os.path.isfile(filename):
        df.to_csv(filename, mode='a', header=False, index=False)
    else:
        df.to_csv(filename, index=False)

    # Fix: interpolate the actual path (the placeholder text printed nothing useful).
    print(f'Data stored in {filename}.\n')

    # update meta
    meta_data[META_KEY]['rows'] += len(apps)
    meta_data[META_KEY]['lastUpdate'] = str(date.today())


# Allow running this collector standalone with a fresh (empty) metadata dict.
if __name__ == '__main__':
    meta: dict = {META_KEY: {'rows': 0}}
    update_apps(meta_data=meta)
69 changes: 69 additions & 0 deletions cache_scripts/aragon/collectors/cast.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
"""
Descp: Script to fetch Cast data and store it. Cast means vote.
Created on: 16-oct-2020
Copyright 2020-2021 Youssef 'FRYoussef' El Faqir El Rhazoui
<[email protected]>
"""

import os
import pandas as pd
from typing import Dict, List
from datetime import datetime, date

from api_requester import ApiRequester


# GraphQL query template; format args: {0} -> page size (first), {1} -> offset (skip).
# Double braces are literal braces escaped for str.format.
CAST_QUERY: str = '{{casts(first: {0}, skip: {1}\
){{id voteId voter supports voterStake createdAt vote{{orgAddress appAddress}} }}}}'

# Result key in the GraphQL response; also the entry name in the metadata dict
# and the basename of the CSV file in the datawarehouse.
META_KEY: str = 'casts'


def _request_casts(current_rows: int) -> List[Dict]:
    """Download every cast (vote) record beyond the first `current_rows` cached."""
    requester: ApiRequester = ApiRequester(endpoint=ApiRequester.ARAGON_VOTING)
    print("Requesting Cast data ...")
    start: datetime = datetime.now()

    records: List[Dict] = requester.n_requests(
        query=CAST_QUERY, skip_n=current_rows, result_key=META_KEY)

    elapsed: float = round((datetime.now() - start).total_seconds(), 2)
    print(f'Cast data requested in {elapsed}s')
    return records


def _transform_to_df(casts: List[Dict]) -> pd.DataFrame:
for cast in casts:
org: str = cast['vote']['orgAddress']
app: str = cast['vote']['appAddress']

del cast['vote']

cast['orgAddress'] = org
cast['appAddress'] = app

return pd.DataFrame(casts)


def update_casts(meta_data: Dict) -> None:
    """Fetch new casts, append them to the CSV warehouse, and advance meta_data.

    meta_data[META_KEY]['rows'] tracks how many rows are already stored; it is
    updated in place together with 'lastUpdate' (today's ISO date).
    """
    casts: List[Dict] = _request_casts(current_rows=meta_data[META_KEY]['rows'])
    df: pd.DataFrame = _transform_to_df(casts=casts)

    filename: str = os.path.join('datawarehouse', 'aragon', f'{META_KEY}.csv')

    # Append without header if the file already exists, else create it with one.
    if os.path.isfile(filename):
        df.to_csv(filename, mode='a', header=False, index=False)
    else:
        df.to_csv(filename, index=False)

    # Fix: interpolate the actual path (the placeholder text printed nothing useful).
    print(f'Data stored in {filename}.\n')

    # update meta
    meta_data[META_KEY]['rows'] += len(casts)
    meta_data[META_KEY]['lastUpdate'] = str(date.today())


# Allow running this collector standalone with a fresh (empty) metadata dict.
if __name__ == '__main__':
    meta: dict = {META_KEY: {'rows': 0}}
    update_casts(meta_data=meta)
60 changes: 60 additions & 0 deletions cache_scripts/aragon/collectors/mini_me_token.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
"""
Descp: Script to fetch MiniMeToken's data and store it.
Created on: 15-oct-2020
Copyright 2020-2021 Youssef 'FRYoussef' El Faqir El Rhazoui
<[email protected]>
"""

import os
import pandas as pd
from typing import Dict, List
from datetime import datetime, date

from api_requester import ApiRequester


# GraphQL query template; format args: {0} -> page size (first), {1} -> offset (skip).
# Double braces are literal braces escaped for str.format.
MINI_ME_TOKEN_QUERY: str = '{{miniMeTokens(first: {0}, skip: {1}\
){{id address totalSupply transferable name symbol orgAddress appAddress}}}}'

# Result key in the GraphQL response; also the entry name in the metadata dict
# and the basename of the CSV file in the datawarehouse.
META_KEY: str = 'miniMeTokens'


def _request_mini_me_tokens(current_rows: int) -> List[Dict]:
    """Download every MiniMe token record beyond the first `current_rows` cached."""
    requester: ApiRequester = ApiRequester(endpoint=ApiRequester.ARAGON_TOKENS)
    print("Requesting Mini me token's data ...")
    start: datetime = datetime.now()

    records: List[Dict] = requester.n_requests(
        query=MINI_ME_TOKEN_QUERY, skip_n=current_rows, result_key=META_KEY)

    elapsed: float = round((datetime.now() - start).total_seconds(), 2)
    print(f"Mini me token's data requested in {elapsed}s")
    return records


def _transform_to_df(tokens: List[Dict]) -> pd.DataFrame:
return pd.DataFrame(tokens)


def update_tokens(meta_data: Dict) -> None:
    """Fetch new tokens, append them to the CSV warehouse, and advance meta_data.

    meta_data[META_KEY]['rows'] tracks how many rows are already stored; it is
    updated in place together with 'lastUpdate' (today's ISO date).
    """
    tokens: List[Dict] = _request_mini_me_tokens(current_rows=meta_data[META_KEY]['rows'])
    df: pd.DataFrame = _transform_to_df(tokens=tokens)

    filename: str = os.path.join('datawarehouse', 'aragon', f'{META_KEY}.csv')

    # Append without header if the file already exists, else create it with one.
    if os.path.isfile(filename):
        df.to_csv(filename, mode='a', header=False, index=False)
    else:
        df.to_csv(filename, index=False)

    # Fix: interpolate the actual path (the placeholder text printed nothing useful).
    print(f'Data stored in {filename}.\n')

    # update meta
    meta_data[META_KEY]['rows'] += len(tokens)
    meta_data[META_KEY]['lastUpdate'] = str(date.today())


# Allow running this collector standalone with a fresh (empty) metadata dict.
if __name__ == '__main__':
    meta: dict = {META_KEY: {'rows': 0}}
    update_tokens(meta_data=meta)
68 changes: 68 additions & 0 deletions cache_scripts/aragon/collectors/organizations.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
"""
Descp: Script to fetch organization's data and store it.
Created on: 15-oct-2020
Copyright 2020-2021 Youssef 'FRYoussef' El Faqir El Rhazoui
<[email protected]>
"""

import os
import pandas as pd
from typing import Dict, List
from datetime import datetime, date

from api_requester import ApiRequester


# GraphQL query template; format args: {0} -> page size (first), {1} -> offset (skip).
# Double braces are literal braces escaped for str.format.
ORGANIZATION_QUERY: str = '{{organizations(first: {0}, skip: {1}\
){{id createdAt recoveryVault}}}}'

# Result key in the GraphQL response; also the entry name in the metadata dict
# and the basename of the CSV file in the datawarehouse.
META_KEY: str = 'organizations'


def _request_organizations(current_rows: int) -> List[Dict]:
    """Download every organization record beyond the first `current_rows` cached."""
    requester: ApiRequester = ApiRequester(endpoint=ApiRequester.ARAGON_MAINNET)
    print("Requesting Organization's data ...")
    start: datetime = datetime.now()

    records: List[Dict] = requester.n_requests(
        query=ORGANIZATION_QUERY, skip_n=current_rows, result_key=META_KEY)

    elapsed: float = round((datetime.now() - start).total_seconds(), 2)
    print(f"Organization's data requested in {elapsed}s")
    return records


def _transform_to_df(orgs: List[Dict]) -> pd.DataFrame:
if not orgs:
return pd.DataFrame()

df: pd.DataFrame = pd.DataFrame(orgs)

#TODO: temporal solution to non-attribute name
df['name'] = df['id'].tolist()

return df


def update_organizations(meta_data: Dict) -> None:
    """Fetch new organizations, append them to the CSV warehouse, and advance meta_data.

    meta_data[META_KEY]['rows'] tracks how many rows are already stored; it is
    updated in place together with 'lastUpdate' (today's ISO date).
    """
    orgs: List[Dict] = _request_organizations(current_rows=meta_data[META_KEY]['rows'])
    df: pd.DataFrame = _transform_to_df(orgs=orgs)

    filename: str = os.path.join('datawarehouse', 'aragon', f'{META_KEY}.csv')

    # Append without header if the file already exists, else create it with one.
    if os.path.isfile(filename):
        df.to_csv(filename, mode='a', header=False, index=False)
    else:
        df.to_csv(filename, index=False)

    # Fix: interpolate the actual path (the placeholder text printed nothing useful).
    print(f'Data stored in {filename}.\n')

    # update meta
    meta_data[META_KEY]['rows'] += len(orgs)
    meta_data[META_KEY]['lastUpdate'] = str(date.today())


# Allow running this collector standalone with a fresh (empty) metadata dict.
if __name__ == '__main__':
    meta: dict = {META_KEY: {'rows': 0}}
    update_organizations(meta_data=meta)
Loading

0 comments on commit c30e65d

Please sign in to comment.