Skip to content

Commit

Permalink
Merge pull request #63 from Code4GovTech/dev
Browse files Browse the repository at this point in the history
Merging Dev With Main
  • Loading branch information
jaanbaaz authored Jan 13, 2025
2 parents 8d1f6f0 + ef4df9f commit f6c0f95
Show file tree
Hide file tree
Showing 14 changed files with 392 additions and 185 deletions.
57 changes: 57 additions & 0 deletions .github/workflows/build-and-push.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
# Build the application's Docker image and publish it to GitHub Container
# Registry (GHCR) on every push to main/dev and on each published release.
name: Build and Push Docker Image

on:
  push:
    branches:
      - main
      - dev
  release:
    types: [published]

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job.
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Log in to the Container registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            # minimal
            type=pep440,pattern={{version}},value=${{ github.ref_name }},enable=${{ github.event_name == 'release' }}
            # branch event
            type=ref,event=branch
            type=raw,value=latest,enable=${{ github.event_name == 'release' }}

      - name: Build and Push Docker image
        uses: docker/build-push-action@v4
        with:
          # build-args:
          context: .
          push: true
          cache-from: type=gha
          cache-to: type=gha,mode=max
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
8 changes: 8 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,10 @@ jobs:
SUPABASE_URL: ${{ vars[format('APP_{0}_SUPABASE_URL', needs.set_vars.outputs.APP_ENV)] }}
SUPABASE_KEY: ${{ secrets[format('APP_{0}_SUPABASE_KEY', needs.set_vars.outputs.APP_ENV)] }}
SECRET_KEY: ${{ secrets[format('APP_{0}_SECRET_KEY', needs.set_vars.outputs.APP_ENV)] }}
POSTGRES_DB_HOST: ${{ secrets[format('APP_{0}_POSTGRES_DB_HOST', needs.set_vars.outputs.APP_ENV)] }}
POSTGRES_DB_NAME: ${{ secrets[format('APP_{0}_POSTGRES_DB_NAME', needs.set_vars.outputs.APP_ENV)] }}
POSTGRES_DB_USER: ${{ secrets[format('APP_{0}_POSTGRES_DB_USER', needs.set_vars.outputs.APP_ENV)] }}
POSTGRES_DB_PASS: ${{ secrets[format('APP_{0}_POSTGRES_DB_PASS', needs.set_vars.outputs.APP_ENV)] }}
steps:
- name: Checkout code
uses: actions/checkout@v2
Expand All @@ -87,6 +91,10 @@ jobs:
echo "SUPABASE_URL=${SUPABASE_URL}" >> .env
echo "SUPABASE_KEY=${SUPABASE_KEY}" >> .env
echo "SECRET_KEY=${SECRET_KEY}" >> .env
echo "POSTGRES_DB_HOST=${POSTGRES_DB_HOST}" >> .env
echo "POSTGRES_DB_NAME=${POSTGRES_DB_NAME}" >> .env
echo "POSTGRES_DB_USER=${POSTGRES_DB_USER}" >> .env
echo "POSTGRES_DB_PASS=${POSTGRES_DB_PASS}" >> .env
mv .env ${{ env.DOT_ENV_FILE_NAME }}
- name: Copy env file to DEV Server
Expand Down
3 changes: 3 additions & 0 deletions .gitmodules
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Shared DB models/migrations, vendored as a git submodule; initialized at
# image build time via `git submodule update --init --recursive`.
[submodule "shared_migrations"]
path = shared_migrations
url = https://github.com/Code4GovTech/shared-models-migrations.git
12 changes: 9 additions & 3 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -5,18 +5,24 @@ FROM python:3.12-slim
WORKDIR /app

# Copy the current directory contents into the container at /app

RUN apt-get update && \
apt-get install -y --no-install-recommends git openssh-client && \
rm -rf /var/lib/apt/lists/*

COPY . /app

RUN --mount=type=ssh git submodule update --init --recursive

# Install any needed packages specified in requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

# Make port 5000 available to the world outside this container
EXPOSE 7000
EXPOSE 5000

# Define environment variable
ENV FLASK_APP=wsgi.py
ENV FLASK_RUN_HOST=0.0.0.0

# Run the application
CMD ["flask", "run", "--host=0.0.0.0", "--port=7000"]

CMD ["flask", "run"]
132 changes: 46 additions & 86 deletions app.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,35 @@
from flask import Flask, jsonify,request,url_for
from db import SupabaseInterface
from collections import defaultdict
from flasgger import Swagger
import re,os,traceback
# from query import PostgresORM
from utils import *
from flask_cors import CORS,cross_origin
from v2_app import v2
from flask_sqlalchemy import SQLAlchemy
from models import db
from shared_migrations.db import get_postgres_uri
from shared_migrations.db.dmp_api import DmpAPIQueries
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import NullPool



app = Flask(__name__)
CORS(app,supports_credentials=True)


app.config['SQLALCHEMY_DATABASE_URI'] = get_postgres_uri()
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

# Initialize Async SQLAlchemy
engine = create_async_engine(app.config['SQLALCHEMY_DATABASE_URI'], echo=False,poolclass=NullPool)
async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession)


db.init_app(app)

Swagger(app)

GITHUB_TOKEN =os.getenv('GITHUB_TOKEN')
Expand Down Expand Up @@ -45,67 +63,12 @@ def greeting():




@app.route('/get-data', methods=['GET'])
@cross_origin(supports_credentials=True)
@require_secret_key
def get_data():
    """
    Fetch data from Supabase.
    ---
    responses:
      200:
        description: Data fetched successfully
        schema:
          type: array
          items:
            type: object
      500:
        description: Error fetching data
        schema:
          type: object
          properties:
            error:
              type: string
    """
    # Read every row from dmp_pr_updates.  Errors are reported in-band with
    # HTTP 200 — the original contract, preserved as-is.
    try:
        client = SupabaseInterface().get_instance().client
        rows = client.table('dmp_pr_updates').select('*').execute().data
        return jsonify(rows)
    except Exception as exc:
        return jsonify({'error': str(exc)}), 200



@app.route('/v1/issues', methods=['GET'])
@require_secret_key
def v1get_issues():
    # Return all dmp_issue_updates rows, bucketed by issue_url and then
    # regrouped by owner via group_by_owner().
    try:
        rows = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').execute().data

        # Bucket records under their issue_url.
        buckets = defaultdict(list)
        for row in rows:
            buckets[row['issue_url']].append({
                'id': row['id'],
                'name': row['body_text']
            })

        flattened = [{'issue_url': url, 'issues': items} for url, items in buckets.items()]
        return jsonify(group_by_owner(flattened))

    except Exception as exc:
        # Errors are returned with HTTP 200 (original contract preserved).
        return jsonify({'error': str(exc), 'traceback': traceback.format_exc()}), 200


@app.route('/issues', methods=['GET'])
@cross_origin(supports_credentials=True)
@require_secret_key
def get_issues():
# @cross_origin(supports_credentials=True)
# @require_secret_key
async def get_issues():
"""
Fetch all issues and group by owner.
---
Expand All @@ -127,30 +90,28 @@ def get_issues():
type: string
"""
try:
# Fetch all issues with their details
response = SupabaseInterface().get_instance().client.table('dmp_orgs').select('*, dmp_issues(*)').execute()
res = []

for org in response.data:
obj = {}
issues = org['dmp_issues']
obj['org_id'] = org['id']
obj['org_name'] = org['name']
renamed_issues = [{"id": issue["id"], "name": issue["title"]} for issue in issues]
obj['issues'] = renamed_issues

res.append(obj)

return jsonify({"issues": res})
# Fetch all issues with their details
print('inside get all issues')
data = await DmpAPIQueries.get_issue_query(async_session)
response = []

for result in data:
response.append({
'org_id': result.org_id,
'org_name': result.org_name,
'issues': result.issues
})

return jsonify({"issues": response})

except Exception as e:
error_traceback = traceback.format_exc()
return jsonify({'error': str(e), 'traceback': error_traceback}), 500

@app.route('/issues/<owner>', methods=['GET'])
@cross_origin(supports_credentials=True)
@require_secret_key
def get_issues_by_owner(owner):
# @cross_origin(supports_credentials=True)
# @require_secret_key
async def get_issues_by_owner(owner):
"""
Fetch organization details by owner's GitHub URL.
---
Expand Down Expand Up @@ -190,16 +151,15 @@ def get_issues_by_owner(owner):
description: Error message
"""
try:
# Construct the GitHub URL based on the owner parameter
org_link = f"https://github.com/{owner}"


# Fetch organization details from dmp_orgs table
response = SupabaseInterface().get_instance().client.table('dmp_orgs').select('name', 'description').eq('name', owner).execute()

if not response.data:
response = await DmpAPIQueries.get_issue_owner(async_session, owner)
if not response:
return jsonify({'error': "Organization not found"}), 404

return jsonify(response.data)

orgs_dict = [org.to_dict() for org in response]

return jsonify(orgs_dict)

except Exception as e:
error_traceback = traceback.format_exc()
Expand Down Expand Up @@ -243,7 +203,7 @@ def get_issues_by_owner_id(owner, issue):
"""
try:
print('inside get issues')
SUPABASE_DB = SupabaseInterface().get_instance()
SUPABASE_DB = DmpAPIQueries.get_instance()
response = SUPABASE_DB.client.table('dmp_issue_updates').select('*').eq('owner', owner).eq('issue_number', issue).execute()
if not response.data:
return jsonify({'error': "No data found"}), 200
Expand Down
61 changes: 0 additions & 61 deletions db.py
Original file line number Diff line number Diff line change
@@ -1,61 +0,0 @@
import os, sys
from typing import Any
from supabase import create_client, Client
from supabase.lib.client_options import ClientOptions
from abc import ABC, abstractmethod

client_options = ClientOptions(postgrest_client_timeout=None)



class SupabaseInterface():
    """Singleton wrapper around a Supabase ``Client``.

    Credentials are read from the ``SUPABASE_URL`` / ``SUPABASE_KEY``
    environment variables (loaded from a ``.env`` file on first
    construction).  Obtain the shared instance via :meth:`get_instance`.
    """

    _instance = None

    def __init__(self):
        if not SupabaseInterface._instance:
            # First construction: load env vars and build the client once.
            from dotenv import load_dotenv
            load_dotenv()

            SUPABASE_URL = os.getenv('SUPABASE_URL')
            SUPABASE_KEY = os.getenv('SUPABASE_KEY')
            self.client: Client = create_client(SUPABASE_URL, SUPABASE_KEY)
            SupabaseInterface._instance = self
        else:
            # BUGFIX: the previous else-branch assigned `self._instance` back
            # to the class (a no-op), leaving any directly-constructed second
            # instance without a `.client`.  Share the existing client so
            # direct construction stays safe.
            self.client = SupabaseInterface._instance.client

    @staticmethod
    def get_instance():
        """Return the singleton instance, creating it on first call."""
        if not SupabaseInterface._instance:
            SupabaseInterface._instance = SupabaseInterface()
        return SupabaseInterface._instance

    def readAll(self, table):
        """Return every row of *table* as a list of dicts."""
        result = self.client.table(f"{table}").select("*").execute()
        return result.data

    def add_data(self, data, table_name):
        """Insert *data* into *table_name*; return the inserted rows."""
        result = self.client.table(table_name).insert(data).execute()
        return result.data

    def add_data_filter(self, data, table_name):
        """Insert *data* into *table_name* unless a row with the same
        ``dmp_id`` already exists.

        Returns the inserted rows, or the string ``"Data already exists"``
        when a duplicate ``dmp_id`` is found.
        """
        # NOTE(review): the old code built a dmp_id/issue_number/owner filter
        # dict but never used it — deduplication is on dmp_id alone.  The dead
        # local has been removed; confirm dmp_id-only dedup is intended.
        existing_data = self.client.table(table_name).select("*").eq('dmp_id', data['dmp_id']).execute()

        if existing_data.data:
            return "Data already exists"

        new_data = self.client.table(table_name).insert(data).execute()
        return new_data.data
3 changes: 1 addition & 2 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,8 @@ services:
web:
build: .
ports:
- "7000:7000"
- "5000:5000"
environment:
FLASK_ENV: ${FLASK_ENV:-development}
SUPABASE_URL: ${SUPABASE_URL}
SUPABASE_KEY: ${SUPABASE_KEY}
SECRET_KEY: ${SECRET_KEY}
Loading

0 comments on commit f6c0f95

Please sign in to comment.