diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..4c49bd7
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1 @@
+.env
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..cbb9c27
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,146 @@
+name: Build
+
+env:
+  APP_NAME: CMS-BACKEND-API
+  PROJECT_NAME: CMS-BACKEND-API
+  DOCKER_COMPOSE_PATH: /root/app/docker-compose.yml
+  REGISTRY: ghcr.io
+  DOCKER_REGISTRY: ghcr.io/code4govtech/dmp-cms-backend-api
+  DOT_ENV_FILE_NAME: env.dmp-cms-backend-api
+
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - devops
+      - dev
+      - main
+
+permissions:
+  contents: write
+  packages: write
+
+jobs:
+  set_vars:
+    name: Set Environment Variables
+    runs-on: ubuntu-latest
+    outputs:
+      TAG_LATEST: ${{ steps.tag_values.outputs.TAG_LATEST }}
+      TAG_ENV_COMMIT: ${{ steps.tag_values.outputs.TAG_ENV_COMMIT }}
+      APP_ENV: ${{ steps.tag_values.outputs.APP_ENV }}
+    steps:
+      - name: Set Docker Image Tags
+        id: tag_values
+        run: |
+          case "${{ github.ref }}" in
+            'refs/heads/main')
+              echo "TAG_LATEST=prod-latest" >> $GITHUB_OUTPUT
+              echo "TAG_ENV_COMMIT=prod-${GITHUB_SHA:0:5}" >> $GITHUB_OUTPUT
+              echo "APP_ENV=PROD" >> $GITHUB_OUTPUT
+              ;;
+            'refs/heads/devops')
+              echo "TAG_LATEST=dev-latest" >> $GITHUB_OUTPUT
+              echo "TAG_ENV_COMMIT=dev-${GITHUB_SHA:0:5}" >> $GITHUB_OUTPUT
+              echo "APP_ENV=DEV" >> $GITHUB_OUTPUT
+              ;;
+            'refs/heads/dev')
+              echo "TAG_LATEST=dev-latest" >> $GITHUB_OUTPUT
+              echo "TAG_ENV_COMMIT=dev-${GITHUB_SHA:0:5}" >> $GITHUB_OUTPUT
+              echo "APP_ENV=DEV" >> $GITHUB_OUTPUT
+              ;;
+          esac
+
+  build:
+    name: Build
+    runs-on: ubuntu-latest
+    needs: [set_vars]
+    permissions:
+      contents: read
+      packages: write
+    env:
+      TAG_LATEST: ${{ needs.set_vars.outputs.TAG_LATEST }}
+      TAG_ENV_COMMIT: ${{ needs.set_vars.outputs.TAG_ENV_COMMIT }}
+      SUPABASE_URL: ${{ vars[format('APP_{0}_SUPABASE_URL', needs.set_vars.outputs.APP_ENV)] }}
+      SUPABASE_KEY: ${{ secrets[format('APP_{0}_SUPABASE_KEY', needs.set_vars.outputs.APP_ENV)] }}
+      SECRET_KEY: ${{ secrets[format('APP_{0}_SECRET_KEY', needs.set_vars.outputs.APP_ENV)] }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+
+      # - name: Login to GitHub Packages
+      #   run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin
+
+      - name: Log in to the Container registry
+        uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Read Secrets
+        run: |
+          echo "SUPABASE_URL=${SUPABASE_URL}" >> .env
+          echo "SUPABASE_KEY=${SUPABASE_KEY}" >> .env
+          echo "SECRET_KEY=${SECRET_KEY}" >> .env
+          mv .env ${{ env.DOT_ENV_FILE_NAME }}
+
+      - name: Copy env file to DEV Server
+        uses: appleboy/scp-action@v0.1.7
+        if: needs.set_vars.outputs.APP_ENV == 'DEV'
+        with:
+          host: ${{ vars.DEV_SERVER_HOST }}
+          username: ${{ vars.DEV_SERVER_USERNAME }}
+          key: ${{ secrets.DEV_SSH_PRIVATE_KEY }}
+          port: ${{ vars.DEV_SERVER_PORT }}
+          source: "${{ env.DOT_ENV_FILE_NAME }}"
+          target: /root/app/
+
+      - name: Build ${{ env.APP_NAME }} Docker image
+        run: |
+          docker build -t ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_LATEST }} .
+
+      - name: Add tag to Docker image
+        run: |
+          echo ${{ github.sha }}
+          docker tag ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_LATEST }} ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_ENV_COMMIT }}
+
+      - name: Push Docker image to GitHub Packages
+        run: |
+          docker push ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_LATEST }}
+          docker push ${{ env.DOCKER_REGISTRY }}:${{ env.TAG_ENV_COMMIT }}
+
+  deploy:
+    name: Deployment
+    runs-on: ubuntu-latest
+    needs: build
+    if: github.event_name == 'push' && github.ref_type == 'branch'
+
+    steps:
+      - name: Deploy to DevOps/Dev Environment
+        if: github.ref == 'refs/heads/devops' || github.ref == 'refs/heads/dev'
+        uses: appleboy/ssh-action@v1.0.3
+        env:
+          DOCKER_COMPOSE_PATH: ${{ env.DOCKER_COMPOSE_PATH }}
+          APP_NAME: ${{ env.APP_NAME }}
+          DOCKER_REGISTRY: ${{ env.DOCKER_REGISTRY }}
+        with:
+          host: ${{ vars.DEV_SERVER_HOST }}
+          username: ${{ vars.DEV_SERVER_USERNAME }}
+          key: ${{ secrets.DEV_SSH_PRIVATE_KEY }}
+          port: ${{ vars.DEV_SERVER_PORT }}
+          allenvs: true
+          script_stop: true
+          envs: DOCKER_COMPOSE_PATH,APP_NAME,DOCKER_REGISTRY
+          script: |
+            echo "Docker Compose Path $DOCKER_COMPOSE_PATH"
+            docker compose -f $DOCKER_COMPOSE_PATH pull
+            docker compose -f $DOCKER_COMPOSE_PATH up -d
+
+      - name: Deploy to Prod environment
+        if: github.ref == 'refs/heads/main'
+        run: echo "Deploying to Kubernetes"
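The `set_vars` job above maps the pushed branch to a pair of image tags plus an environment name, which the `build` job then uses to look up per-environment secrets via `format('APP_{0}_...', APP_ENV)`. A minimal Python sketch of that mapping (illustration only; the names mirror the workflow outputs):

```python
# Sketch of the branch -> tag mapping computed by the set_vars job.
def docker_tags(git_ref: str, sha: str) -> dict:
    env = "prod" if git_ref == "refs/heads/main" else "dev"  # devops and dev both map to dev
    return {
        "TAG_LATEST": f"{env}-latest",
        "TAG_ENV_COMMIT": f"{env}-{sha[:5]}",  # first 5 characters of the commit SHA
        "APP_ENV": env.upper(),
    }

print(docker_tags("refs/heads/main", "0a1b2c3d4e"))
# {'TAG_LATEST': 'prod-latest', 'TAG_ENV_COMMIT': 'prod-0a1b2', 'APP_ENV': 'PROD'}
```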
diff --git a/.gitignore b/.gitignore
index bf3ff3d..b2d0cb0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,5 @@
 dmp_2/__pycache__/*
 .env
 env/*
-
+venv
 __pycache__/*
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..53f495c
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,22 @@
+# Use an official Python runtime as a parent image
+FROM python:3.12-slim
+
+# Set the working directory in the container
+WORKDIR /app
+
+# Copy the current directory contents into the container at /app
+COPY . /app
+
+# Install any needed packages specified in requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Make port 5000 available to the world outside this container
+EXPOSE 5000
+
+# Define environment variables for the Flask CLI
+ENV FLASK_APP=wsgi.py
+ENV FLASK_RUN_HOST=0.0.0.0
+
+# Run the application
+CMD ["flask", "run"]
diff --git a/app.py b/app.py
index 737f120..5f8c60b 100644
--- a/app.py
+++ b/app.py
@@ -1,35 +1,54 @@
-from flask import Flask, jsonify
+from flask import Flask, jsonify, request, url_for
 from db import SupabaseInterface
 from collections import defaultdict
 from flasgger import Swagger
+import re, os, traceback, markdown2
+from utils import *
+from flask_cors import CORS, cross_origin
+from v2_app import v2
 
 app = Flask(__name__)
 
+CORS(app, supports_credentials=True)
+
 Swagger(app)
 
+GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')
+SECRET_KEY = os.getenv('SECRET_KEY')
+
-@app.route('/api/greeting', methods=['GET'])
-def greeting():
-    """
-    A simple greeting endpoint.
-    ---
-    responses:
-      200:
-        description: A greeting message
-        schema:
-          type: object
-          properties:
-            message:
-              type: string
-              example: Hello, welcome to my API!
-    """
+headers = {
+    "Accept": "application/vnd.github+json",
+    "Authorization": f"Bearer {GITHUB_TOKEN}",
+    "X-GitHub-Api-Version": "2022-11-28"
+}
+
+# Routes that require the X-Secret-Key header
+protected_routes = [
+    re.compile(r'^/greeting$'),
+    re.compile(r'^/get-data$'),
+    re.compile(r'^/issues$'),
+    re.compile(r'^/issues/[^/]+$'),       # Matches '/issues/<owner>'
+    re.compile(r'^/issues/[^/]+/[^/]+$')  # Matches '/issues/<owner>/<issue>'
+]
+
+
+@app.route('/greeting', methods=['GET'])
+@cross_origin(supports_credentials=True)
+def greeting():
     response = {
         'message': 'Hello, welcome to my API!'
     }
     return jsonify(response)
 
-@app.route('/api/get-data', methods=['GET'])
+
+@app.route('/get-data', methods=['GET'])
+@cross_origin(supports_credentials=True)
+@require_secret_key
 def get_data():
     """
     Fetch data from Supabase.
@@ -54,27 +73,38 @@ def get_data():
         data = response.data
         return jsonify(data)
     except Exception as e:
-        return jsonify({'error': str(e)}), 500
+        return jsonify({'error': str(e)}), 200
 
-def group_by_owner(data):
-    grouped_data = defaultdict(list)
-    for record in data:
-        owner = record['owner']
-        grouped_data[owner].append(record)
-
-
-    #Arrange data as reponse format
-    res = []
-    for val in grouped_data:
-        dict_ = {}
-        dict_['org_name'] = val
-        dict_['issues'] = grouped_data[val]
-        res.append(dict_)
-
-    return {"issues":res}
 
-@app.route('/api/issues', methods=['GET'])
+@app.route('/v1/issues', methods=['GET'])
+@require_secret_key
+def v1get_issues():
+    try:
+        response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').execute()
+        data = response.data
+
+        # Group data based on issues
+        grouped_data = defaultdict(list)
+        for record in data:
+            issue_url = record['issue_url']
+            grouped_data[issue_url].append({
+                'id': record['id'],
+                'name': record['body_text']
+            })
+
+        result = [{'issue_url': issue_url, 'issues': issues} for issue_url, issues in grouped_data.items()]
+        grouped_data = group_by_owner(result)
+        return jsonify(grouped_data)
+
+    except Exception as e:
+        error_traceback = traceback.format_exc()
+        return jsonify({'error': str(e), 'traceback': error_traceback}), 200
+
+
+@app.route('/issues', methods=['GET'])
+@cross_origin(supports_credentials=True)
+@require_secret_key
 def get_issues():
     """
     Fetch all issues and group by owner.
@@ -97,52 +127,90 @@ def get_issues():
           type: string
     """
     try:
-        response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').execute()
-        data = response.data
-        grouped_data = group_by_owner(data)
-        return jsonify(grouped_data)
+        # Fetch all issues with their details
+        response = SupabaseInterface().get_instance().client.table('dmp_orgs').select('*, dmp_issues(*)').execute()
+        res = []
+
+        for org in response.data:
+            obj = {}
+            issues = org['dmp_issues']
+            obj['org_id'] = org['id']
+            obj['org_name'] = org['name']
+            renamed_issues = [{"id": issue["id"], "name": issue["title"]} for issue in issues]
+            obj['issues'] = renamed_issues
+            res.append(obj)
+
+        return jsonify({"issues": res})
+
     except Exception as e:
-        return jsonify({'error': str(e)}), 500
+        error_traceback = traceback.format_exc()
+        return jsonify({'error': str(e), 'traceback': error_traceback}), 500
 
-@app.route('/api/issues/<owner>', methods=['GET'])
+@app.route('/issues/<owner>', methods=['GET'])
+@cross_origin(supports_credentials=True)
+@require_secret_key
 def get_issues_by_owner(owner):
     """
-    Fetch issues by owner.
+    Fetch organization details by owner's GitHub URL.
     ---
     parameters:
       - name: owner
         in: path
         type: string
         required: true
-        description: The owner of the issues
+        description: The owner of the GitHub URL (e.g., organization owner)
     responses:
       200:
-        description: Issues fetched successfully
+        description: Organization details fetched successfully
         schema:
-          type: array
-          items:
-            type: object
+          type: object
+          properties:
+            name:
+              type: string
+              description: Name of the organization
+            description:
+              type: string
+              description: Description of the organization
+      404:
+        description: Organization not found
+        schema:
+          type: object
+          properties:
+            error:
+              type: string
+              description: Error message
       500:
-        description: Error fetching issues
+        description: Error fetching organization details
         schema:
           type: object
          properties:
            error:
              type: string
+              description: Error message
     """
     try:
-        response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('owner', owner).execute()
+        # Fetch organization details from the dmp_orgs table
+        response = SupabaseInterface().get_instance().client.table('dmp_orgs').select('name', 'description').eq('name', owner).execute()
+
         if not response.data:
-            return jsonify({'error': "No data found"}), 500
-        data = response.data
-        data = [{**item, "name": item["owner"]} for item in data]
-        return jsonify(data)
+            return jsonify({'error': "Organization not found"}), 404
+
+        return jsonify(response.data)
+
     except Exception as e:
-        return jsonify({'error': str(e)}), 500
+        error_traceback = traceback.format_exc()
+        return jsonify({'error': str(e), 'traceback': error_traceback}), 500
 
-@app.route('/api/issues/<owner>/<issue>', methods=['GET'])
+
+@app.route('/issues/<owner>/<issue>', methods=['GET'])
+@cross_origin(supports_credentials=True)
+@require_secret_key
 def get_issues_by_owner_id(owner, issue):
-    """
+    """
     Fetch issues by owner and issue number.
     ---
     parameters:
@@ -156,6 +224,7 @@ def get_issues_by_owner_id(owner, issue):
         type: string
         required: true
         description: The issue number
+
     responses:
       200:
         description: Issues fetched successfully
@@ -170,15 +239,100 @@ def get_issues_by_owner_id(owner, issue):
           properties:
             error:
               type: string
-    """
-    try:
-        response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('owner', owner).eq('issue_number', issue).execute()
-        if not response.data:
-            return jsonify({'error': "No data found"}), 500
-        data = response.data
-        return jsonify(data)
-    except Exception as e:
-        return jsonify({'error': str(e)}), 500
+
+    """
+    try:
+        SUPABASE_DB = SupabaseInterface().get_instance()
+        response = SUPABASE_DB.client.table('dmp_issue_updates').select('*').eq('owner', owner).eq('issue_number', issue).execute()
+        if not response.data:
+            return jsonify({'error': "No data found"}), 200
+        data = response.data
+
+        final_data = []
+        w_learn_url, w_goal_url, avg, cont_details = None, None, None, None
+
+        for val in data:
+            issue_url = "https://api.github.com/repos/{}/{}/issues/comments".format(val['owner'], val['repo'])
+            # week_avg, cont_name, cont_id, w_goal, w_learn, weekby_avgs, org_link = find_week_avg(issue_url)
+            # mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []}
+
+            if val['body_text']:
+                if "Weekly Goals" in val['body_text'] and not w_goal_url:
+                    w_goal_url = val['body_text']
+                    plain_text_body = markdown2.markdown(val['body_text'])
+
+                    tasks = re.findall(r'\[(x| )\]', plain_text_body)
+                    total_tasks = len(tasks)
+                    completed_tasks = tasks.count('x')
+
+                    avg = round((completed_tasks/total_tasks)*100) if total_tasks != 0 else 0
+
+                if "Weekly Learnings" in val['body_text'] and not w_learn_url:
+                    w_learn_url = val['body_text']
+
+            # mentors = mentors_data['mentors']
+            # ment_usernames = mentors_data['mentor_usernames']
+            if not cont_details:
+                cont_details = SUPABASE_DB.client.table('dmp_issues').select('*').eq('repo_url', val['dmp_issue_url']).execute().data
+
+            res = {
+                "name": owner,
+                "description": val['description'],
+                "mentor_name": val['mentor_name'],
+                "mentor_id": val['mentor_id'],
+                "contributor_name": cont_details[0]['contributor_name'],
+                "contributor_id": cont_details[0]['contributor_id'],
+                "org_name": val['owner'],
+                "org_link": val['org_link'],
+                "weekly_goals_html": w_goal_url,
+                "weekly_learnings_html": w_learn_url,
+                "overall_progress": avg,
+                "issue_url": val['html_issue_url'],
+                "pr_details": None
+            }
+
+            pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('repo', val['repo']).execute()
+            transformed = {"pr_details": []}
+            if pr_Data.data:
+                for pr in pr_Data.data:
+                    transformed["pr_details"].append({
+                        "id": pr.get("pr_id", ""),
+                        "name": pr.get("meta_data", ""),
+                        "week": determine_week(pr['created_at']),
+                        "link": pr.get("html_url", ""),
+                        "status": pr.get("status", ""),
+                    })
+
+            res['pr_details'] = transformed['pr_details']
+
+            # Adding each week as a separate key
+            # for week in weekby_avgs:
+            #     res.update(week)
+            # final_data.append(res)
+
+        return jsonify(res), 200
+
+    except Exception as e:
+        error_traceback = traceback.format_exc()
+        return jsonify({'error': str(e), 'traceback': error_traceback}), 200
+
+
+# Before-request handler to check for the presence of the secret key
+# @app.before_request
+def check_secret_key():
+    for route_pattern in protected_routes:
+        if route_pattern.match(request.path):
+            secret_key = request.headers.get('X-Secret-Key')
+            if secret_key != SECRET_KEY:
+                return jsonify({'message': 'Unauthorized access'}), 401
+            break  # Stop checking once the current route matches
+
+
+# Register the v2 Blueprint
+app.register_blueprint(v2, url_prefix='/v2')
 
 if __name__ == '__main__':
     app.run(debug=True)
\ No newline at end of file
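Every route decorated with `require_secret_key` (defined in `utils.py` below) rejects requests whose `X-Secret-Key` header does not match the server's `SECRET_KEY`. A minimal client sketch, assuming the API is running locally on port 5000 and `SECRET_KEY` is exported in your shell:

```python
# Hypothetical client call against a locally running instance.
import os
import requests

resp = requests.get(
    "http://localhost:5000/issues",
    headers={"X-Secret-Key": os.environ["SECRET_KEY"]},  # must match the server's key
)
print(resp.status_code, resp.json())
```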
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..5d41718
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,11 @@
+version: '3.8'
+
+services:
+  web:
+    build: .
+    ports:
+      - "5000:5000"
+    environment:
+      FLASK_ENV: ${FLASK_ENV:-development}
+      SUPABASE_URL: ${SUPABASE_URL}
+      SUPABASE_KEY: ${SUPABASE_KEY}
diff --git a/requirements.txt b/requirements.txt
index 80a365b..98244a7 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,4 +3,7 @@ httpx==0.27.0
 python-dotenv==1.0.1
 supabase==2.4.5
 gunicorn==22.0.0
-flasgger==0.9.7.1
\ No newline at end of file
+flasgger==0.9.7.1
+markdown2==2.4.13
+requests==2.32.2
+flask-cors==4.0.1
\ No newline at end of file
diff --git a/sample.env b/sample.env
new file mode 100644
index 0000000..1e4d290
--- /dev/null
+++ b/sample.env
@@ -0,0 +1,2 @@
+SUPABASE_URL=""
+SUPABASE_KEY=""
\ No newline at end of file
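The compose file expects `SUPABASE_URL` and `SUPABASE_KEY` in the host environment. For local development you can populate them from a copy of `sample.env` using `python-dotenv`, which is already pinned in `requirements.txt`; a minimal sketch:

```python
# Load the variables documented in sample.env from a local .env file
# (copy sample.env to .env and fill in real values first).
from dotenv import load_dotenv
import os

load_dotenv(".env")
print(os.getenv("SUPABASE_URL"))
```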
diff --git a/utils.py b/utils.py
new file mode 100644
index 0000000..37c1e54
--- /dev/null
+++ b/utils.py
@@ -0,0 +1,267 @@
+import requests, re, markdown2, os
+from collections import defaultdict
+from datetime import datetime, timedelta
+from dateutil import parser
+from flask import jsonify, request
+from functools import wraps
+
+GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')
+SECRET_KEY = os.getenv('SECRET_KEY')
+
+headers = {
+    "Accept": "application/vnd.github+json",
+    "Authorization": f"Bearer {GITHUB_TOKEN}",
+    "X-GitHub-Api-Version": "2022-11-28"
+}
+
+
+# Custom decorator to validate the secret key header
+def require_secret_key(f):
+    @wraps(f)
+    def decorated_function(*args, **kwargs):
+        secret_key = request.headers.get('X-Secret-Key')
+        if secret_key != SECRET_KEY:
+            return jsonify({'message': 'Unauthorized access'}), 401
+        return f(*args, **kwargs)
+    return decorated_function
+
+
+def find_org_data(url):
+    try:
+        url_parts = url.split("/")
+        owner = url_parts[4]
+        repo = url_parts[5]
+
+        # Fetch repository details to get organization info
+        repo_url = f"https://api.github.com/repos/{owner}/{repo}"
+        repo_response = requests.get(repo_url, headers=headers)
+        repo_data = repo_response.json()
+        if repo_data:
+            org_name = repo_data['owner']['login']
+            org_id = repo_data['owner']['id']
+        else:
+            org_name = None
+            org_id = None
+        return {"org_id": org_id, "org_name": org_name}
+
+    except Exception as e:
+        return {"org_id": None, "org_name": None}
+
+
+def get_issue_details(issue_url):
+    url_parts = issue_url.split("/")
+    owner = url_parts[4]
+    repo = url_parts[5]
+    issue_number = url_parts[6]
+
+    # GitHub API endpoint to get the issue details
+    issue_api_url = f"https://api.github.com/repos/{owner}/{repo}/issues"
+
+    # Send GET request to the GitHub API with authentication
+    response = requests.get(issue_api_url, headers=headers)
+    if response.status_code == 200:
+        issue_data = response.json()
+        return [{'id': issue['id'], 'name': issue['title'], 'html_url': issue['html_url'], 'issue_number': issue['number']} for issue in issue_data if "pull_request" not in issue]
+    else:
+        return {'id': None, 'name': None, 'html_url': None, 'issue_number': None}
+
+
+def group_by_owner(data):
+    res = []
+    for record in data:
+        org_data = find_org_data(record['issue_url'])
+        dict_ = {}
+        dict_['org_name'] = org_data['org_name']
+        dict_['org_id'] = org_data['org_id']
+        dict_['issues'] = get_issue_details(record['issue_url'])
+        res.append(dict_)
+
+    # org_dict = defaultdict(lambda: {'issues': [], 'org_id': None, 'org_name': None})
+    # for entry in res:
+    #     org_id = entry['org_id']
+    #     org_name = entry['org_name']
+    #     org_dict[org_id]['issues'].extend(entry['issues'])
+    #     org_dict[org_id]['org_id'] = org_id
+    #     org_dict[org_id]['org_name'] = org_name
+    # return list(org_dict.values())
+    return res
+
+
+def find_week_data(issue_details):
+    try:
+        # Find how many weeks are present in the response
+        weekly_updates = []
+        for item in issue_details:
+            if "Weekly Goals" in item["body"]:
+                week_match = re.search(r'Week \d+', item["body"])
+                if week_match:
+                    weekly_updates.append({
+                        "id": item["id"],
+                        "val": item,
+                        "week": week_match.group(0)
+                    })
+
+        val = []
+        for week in weekly_updates:
+            plain_text_body = markdown2.markdown(week['val']['body'])
+
+            tasks = re.findall(r'\[(x| )\]', plain_text_body)
+            total_tasks = len(tasks)
+            completed_tasks = tasks.count('x')
+
+            avg = round((completed_tasks/total_tasks)*100) if total_tasks != 0 else 0
+
+            week[str(week['week']) + ' percentage'] = avg
+            del week['val']
+            del week['id']
+            del week['week']
+            val.append(week)
+
+        return val
+
+    except Exception as e:
+        return {}
+
+
+def find_week_avg(url):
+    response = requests.get(url, headers=headers)
+    if response.status_code == 200:
+        issue_details = response.json()
+
+        # week_avgs = find_week_data(issue_details)  # phase 2
+        week_avgs = None
+
+        w_learn_url = None
+        w_goal_url = None
+        avg = 0
+        for item in issue_details:
+            if "Weekly Goals" in item['body']:
+                w_goal_url = item['body']
+                plain_text_body = markdown2.markdown(item['body'])
+
+                tasks = re.findall(r'\[(x| )\]', plain_text_body)
+                total_tasks = len(tasks)
+                completed_tasks = tasks.count('x')
+
+                avg = round((completed_tasks/total_tasks)*100) if total_tasks != 0 else 0
+
+            if "Weekly Learnings" in item['body']:
+                w_learn_url = item['body']
+
+        return avg, issue_details[0]['user']['login'], issue_details[0]['user']['id'], w_goal_url, w_learn_url, week_avgs, issue_details[0]['user']['html_url']
+
+
+def find_mentors(url):
+    response = requests.get(url, headers=headers)
+    if response.status_code == 200:
+        issue_details = response.json()
+
+        issue_body = issue_details['body']
+        pattern = r"## Mentors\s*([\s\S]+?)\s*##"
+        disc_pattern = r"## Desc 1\s*([\s\S]+?)\s*##"
+        disc_match = re.search(disc_pattern, issue_body)
+        disc_text = disc_match.group(1).strip() if disc_match else None
+
+        match = re.search(pattern, issue_body)
+        if match:
+            mentors_text = match.group(1).strip()
+            # Extract individual mentor usernames
+            mentors = [mentor.strip() for mentor in mentors_text.split(',')]
+        else:
+            mentors = []
+
+        api_base_url = "https://api.github.com/users/"
+        ment_username = []
+        for val in mentors:
+            url = f"{api_base_url}{val[1:]}"
+            username = requests.get(url, headers=headers)
+            ment_username.append(username.json()['login'])
+
+        return {
+            'mentors': mentors,
+            'mentor_usernames': ment_username,
+            'desc': disc_text
+        }
+    else:
+        return {
+            'mentors': [],
+            'mentor_usernames': [],
+            'desc': None
+        }
+
+
+def get_pr_details(url):
+    try:
+        issue_url = url
+        url_parts = issue_url.split("/")
+        owner = url_parts[4]
+        repo = url_parts[5]
+        issue_number = url_parts[7]
+
+        # GitHub API endpoint to get pull requests for the repository
+        pulls_url = f"https://api.github.com/repos/{owner}/{repo}/pulls"
+
+        # Send GET request to the GitHub API with authentication
+        response = requests.get(pulls_url, headers=headers)
+        if response.status_code == 200:
+            pulls = response.json()
+            return pulls
+        else:
+            return []
+
+    except Exception as e:
+        raise
+
+
+def get_repo_details(owner, repo):
+    url = f"https://api.github.com/repos/{owner}/{repo}"
+    response = requests.get(url, headers=headers)
+    if response.status_code == 200:
+        return response.json()
+    else:
+        return None
+
+
+def determine_week(input_date_str, start_date_str='2024-06-11'):
+    try:
+        # Convert the start date string to a datetime object
+        start_date = datetime.strptime(start_date_str, '%Y-%m-%d')
+        input_date = parser.parse(input_date_str).replace(tzinfo=None)
+
+        # Calculate the difference in days
+        difference_in_days = (input_date - start_date).days
+        if difference_in_days < 0:
+            return "Week 0"
+        week_number = (difference_in_days // 7) + 1
+        return f"Week {week_number}"
+
+    except Exception as e:
+        return "Week -1"
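`determine_week` buckets a timestamp into one-week windows counted from the hard-coded cohort start date (2024-06-11 by default), returning "Week 0" for earlier dates and "Week -1" on parse errors. A quick illustration, with dates chosen only for the example:

```python
# Quick check of determine_week's bucketing.
from utils import determine_week

print(determine_week("2024-06-11T10:00:00Z"))  # Week 1 (day 0 of the window)
print(determine_week("2024-06-18T10:00:00Z"))  # Week 2 (7 days in)
print(determine_week("2024-06-01T10:00:00Z"))  # Week 0 (before the start date)
```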
diff --git a/v2_app.py b/v2_app.py
new file mode 100644
index 0000000..6f8ee2a
--- /dev/null
+++ b/v2_app.py
@@ -0,0 +1,103 @@
+import traceback, re
+from flask import Blueprint, jsonify, request
+import markdown2
+from utils import require_secret_key, determine_week
+from db import SupabaseInterface
+from v2_utils import calculate_overall_progress, define_link_data, week_data_formatter
+
+v2 = Blueprint('v2', __name__)
+
+
+@v2.route('/issues/<owner>/<issue>', methods=['GET'])
+@require_secret_key
+def get_issues_by_owner_id_v2(owner, issue):
+    try:
+        SUPABASE_DB = SupabaseInterface().get_instance()
+
+        # Resolve the actual repo owner recorded for this organisation
+        actual_owner = SUPABASE_DB.client.table('dmp_orgs').select('id', 'name', 'repo_owner').like('name', owner).execute().data
+        repo_owner = actual_owner[0]['repo_owner'] if actual_owner else ""
+        # Create the URL with the repo owner
+        url = f"https://github.com/{repo_owner}" if repo_owner else None
+
+        dmp_issue_id = SUPABASE_DB.client.table('dmp_issues').select('*').like('issue_url', f'%{url}%').eq('id', issue).execute()
+        if not dmp_issue_id.data:
+            return jsonify({'error': "No data found"}), 500
+
+        dmp_issue_id = dmp_issue_id.data[0]
+        response = SUPABASE_DB.client.table('dmp_issue_updates').select('*').eq('dmp_id', dmp_issue_id['id']).execute()
+        if not response.data:
+            return jsonify({'error': "No data found"}), 500
+
+        data = response.data
+
+        final_data = []
+        w_learn_url, w_goal_url, avg, cont_details, plain_text_body, plain_text_wurl = None, None, None, None, None, None
+
+        for val in data:
+            # issue_url = "https://api.github.com/repos/{}/{}/issues/comments".format(val['owner'], val['repo'])
+            # week_avg, cont_name, cont_id, w_goal, w_learn, weekby_avgs, org_link = find_week_avg(issue_url)
+            # mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []}
+
+            if val['body_text']:
+                if ("Weekly Goals" in val['body_text'] and not w_goal_url):
+                    w_goal_url = val['body_text']
+                    plain_text_body = markdown2.markdown(val['body_text'])
+                    tasks = re.findall(r'\[(x| )\]', plain_text_body)
+                    total_tasks = len(tasks)
+                    completed_tasks = tasks.count('x')
+                    avg = round((completed_tasks/total_tasks)*100) if total_tasks != 0 else 0
+
+                if ("Weekly Learnings" in val['body_text'] and not w_learn_url):
+                    w_learn_url = val['body_text']
+                    plain_text_wurl = markdown2.markdown(val['body_text'])
+
+            # mentors = mentors_data['mentors']
+            # ment_usernames = mentors_data['mentor_usernames']
+            if not cont_details:
+                cont_details = dmp_issue_id['contributor_username']
+
+            week_data = week_data_formatter(plain_text_body, "Goals")
+            res = {
+                "name": dmp_issue_id['title'],
+                "description": dmp_issue_id['description'],
+                "mentor": define_link_data(dmp_issue_id['mentor_username']),
+                "mentor_id": dmp_issue_id['mentor_username'],
+                "contributor": define_link_data(cont_details),
+                # "contributor_id": cont_details[0]['contributor_id'],
+                "org": define_link_data(repo_owner)[0] if repo_owner else None,
+                "weekly_goals_html": w_goal_url,
+                "weekly_learnings_html": w_learn_url,
+                "overall_progress": calculate_overall_progress(week_data, 12),
+                "issue_url": dmp_issue_id['issue_url'],
+                "pr_details": None,
+                "weekly_goals": week_data,
+                "weekly_learnings": week_data_formatter(plain_text_wurl, "Learnings")
+            }
+
+        pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('dmp_id', dmp_issue_id['id']).execute()
+        transformed = {"pr_details": []}
+        if pr_Data.data:
+            for pr in pr_Data.data:
+                transformed["pr_details"].append({
+                    "id": pr.get("pr_id", ""),
+                    "name": pr.get("title", ""),
+                    "week": determine_week(pr['created_at']),
+                    "link": pr.get("link", ""),
+                    "status": pr.get("status", ""),
+                })
+
+        res['pr_details'] = transformed['pr_details']
+
+        return jsonify(res), 200
+
+    except Exception as e:
+        error_traceback = traceback.format_exc()
+        return jsonify({'error': str(e), 'traceback': error_traceback}), 200
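Both this route and the v1 handler in `app.py` measure weekly progress the same way: render the comment body with `markdown2`, then count checked boxes. A standalone sketch of that calculation, with an invented sample body:

```python
# Count "[x]" vs "[ ]" checkboxes in a rendered Markdown body.
import re
import markdown2

body = "## Weekly Goals\n- [x] set up repo\n- [x] write API\n- [ ] add tests"
html = markdown2.markdown(body)

tasks = re.findall(r"\[(x| )\]", html)
completed = tasks.count("x")
progress = round(completed / len(tasks) * 100) if tasks else 0
print(progress)  # 67
```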
diff --git a/v2_utils.py b/v2_utils.py
new file mode 100644
index 0000000..5697aa6
--- /dev/null
+++ b/v2_utils.py
@@ -0,0 +1,100 @@
+import logging, re, markdown2
+
+
+# Build name/link pairs for all mentors and contributors
+def define_link_data(usernames):
+    try:
+        res = []
+        if isinstance(usernames, list):
+            for username in usernames:
+                val = {}
+                if username[0] == "@":
+                    username = username[1:]
+                val['name'] = username
+                val['link'] = "https://github.com/" + username
+                res.append(val)
+
+        if isinstance(usernames, str):
+            if usernames[0] == "@":
+                usernames = usernames[1:]
+            val = {}
+            val['name'] = usernames
+            val['link'] = "https://github.com/" + usernames
+            res.append(val)
+
+        return res
+
+    except Exception as e:
+        logging.info(f"{e}---define_link_data")
+        return []
+
+
+def week_data_formatter(html_content, section_type):
+    try:
+        # Find week titles (e.g. "Week 1", "Week 2") and the task list that follows each one
+        week_matches = re.findall(r'(Week \d+)', html_content)
+        tasks_per_week = re.split(r'Week \d+', html_content)[1:]  # Split the content by weeks and skip the first empty split
+
+        weekly_updates = []
+
+        if section_type == "Learnings":
+            # tasks_per_week = re.split(r'<.*?>Week \d+<.*?>', html_content)[1:]
+            tasks_per_week = re.split(r'(<.*?>Week \d+<.*?>)', html_content)[1:]
+            tasks_per_week = [tasks_per_week[i] for i in range(1, len(tasks_per_week), 2)]
+            for i, week in enumerate(week_matches):
+                task_list_html = tasks_per_week[i] if i < len(tasks_per_week) else ""
+                weekly_updates.append({
+                    'week': i + 1,
+                    'content': task_list_html.strip()
+                })
+            return weekly_updates
+
+        else:
+            for i, week in enumerate(week_matches):
+                task_list_html = tasks_per_week[i] if i < len(tasks_per_week) else ""
+
+                # Capture the checkbox state and task text up to the closing list tag
+                tasks = re.findall(r'\[(x|X| )\]\s*(.*?)</li>', task_list_html, re.DOTALL)
+
+                total_tasks = len(tasks)
+                completed_tasks = sum(1 for task in tasks if task[0] in ['x', 'X'])
+                task_list = [{"content": task[1].strip(), "checked": task[0] in ['x', 'X']} for task in tasks]
+
+                avg = round((completed_tasks / total_tasks) * 100) if total_tasks != 0 else 0
+
+                weekly_updates.append({
+                    'week': i + 1,
+                    'progress': avg,
+                    'tasks': task_list
+                })
+
+            return weekly_updates
+
+    except Exception as e:
+        print(f"Error: {e}")
+        return []
+
+
+def calculate_overall_progress(weekly_updates, default_weeks=12):
+    try:
+        total_progress = 0
+        provided_weeks = len(weekly_updates)
+
+        # Sum the progress of each provided week
+        for week in weekly_updates:
+            total_progress += week.get('progress', 0)
+
+        # Weeks not yet reported count as zero progress, so the average
+        # is always taken over the full default_weeks span
+        total_weeks = default_weeks
+
+        # Calculate the average progress over the total number of weeks
+        overall_progress = total_progress / total_weeks if total_weeks > 0 else 0
+
+        return round(overall_progress, 2)
+    except Exception as e:
+        print(f"Error: {e}")
+        return 0
\ No newline at end of file
diff --git a/wsgi.py b/wsgi.py
index 6026b0f..bf4d905 100644
--- a/wsgi.py
+++ b/wsgi.py
@@ -1,4 +1,4 @@
 from app import app
 
 if __name__ == "__main__":
-    app.run()
+    app.run(host='0.0.0.0')
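Note that `calculate_overall_progress` averages over the full default 12-week programme rather than over the weeks actually reported, so early in a cohort the figure stays low by design. A quick check:

```python
# Two reported weeks averaged over the default 12-week span.
from v2_utils import calculate_overall_progress

weeks = [{"week": 1, "progress": 100}, {"week": 2, "progress": 50}]
print(calculate_overall_progress(weeks))  # (100 + 50) / 12 = 12.5
```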