From 40af9c6c038be9ba85eafb454aac9f7fd706a717 Mon Sep 17 00:00:00 2001
From: sasi
Date: Wed, 24 Jul 2024 16:04:46 +0530
Subject: [PATCH 01/32] test cases for all functions
---
 test.py | 134 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 134 insertions(+)
 create mode 100644 test.py

diff --git a/test.py b/test.py
new file mode 100644
index 0000000..e743409
--- /dev/null
+++ b/test.py
@@ -0,0 +1,134 @@
+import httpx,logging,os,unittest
+from app import app
+from db import SupabaseInterface
+from app import define_issue_description_update ,define_pr_update,define_issue_update
+
+
+# Suppress asyncio debug messages
+logging.getLogger('asyncio').setLevel(logging.CRITICAL)
+
+# Optionally, you can also suppress other debug messages if needed
+logging.basicConfig(level=logging.CRITICAL)
+
+class CustomTestResult(unittest.TextTestResult):
+    def addSuccess(self, test):
+        super().addSuccess(test)
+        print(f"{test._testMethodName} - passed")
+
+
+class CustomTestRunner(unittest.TextTestRunner):
+    resultclass = CustomTestResult
+
+class TestDMPUpdates(unittest.IsolatedAsyncioTestCase):
+
+    async def asyncSetUp(self):
+        self.app = app
+        self.client = self.app.test_client()
+        self.db = SupabaseInterface().get_instance()
+        self.issue_response = None
+        self.comments_response = None
+        self.pr_response = None
+
+        # Fetch dmp issues from the database
+        dmp_tickets = self.db.get_dmp_issues()
+        if not dmp_tickets:
+            self.skipTest("No dmp_tickets found")
+
+        # Use the first dmp ticket to form the URL
+        dmp = dmp_tickets[0]
+        self.dmp_id = dmp['id']
+        self.issue_number = dmp['issue_number']
+        self.repo = dmp['repo']
+        self.owner = dmp['dmp_orgs']['repo_owner']
+
+        GITHUB_TOKEN = os.getenv('GITHUB_TOKEN') # Ensure this is correctly set in your environment
+        self.description_url = f"https://api.github.com/repos/{self.owner}/{self.repo}/issues/{self.issue_number}"
+        self.comments_url = f"https://api.github.com/repos/{self.owner}/{self.repo}/issues/{self.issue_number}/comments?page=1"
+        self.pr_url = f"https://api.github.com/repos/{self.owner}/{self.repo}/pulls?state=all"
+
+        headers = {
+            "Accept": "application/vnd.github+json",
+            "Authorization": f"Bearer {GITHUB_TOKEN}",
+            "X-GitHub-Api-Version": "2022-11-28"
+        }
+
+        async with httpx.AsyncClient() as client:
+            # Test GitHub API call for issue description
+            issue_response = await client.get(self.description_url, headers=headers)
+            if issue_response.status_code == 200:
+                self.issue_response = issue_response.json()
+            else:
+                self.skipTest(f"GitHub API call failed with status code {issue_response.status_code}")
+
+            # Test GitHub API call for comments
+            comments_response = await client.get(self.comments_url, headers=headers)
+            if comments_response.status_code == 200:
+                self.comments_response = comments_response.json()
+            else:
+                self.skipTest(f"GitHub comments API call failed with status code {comments_response.status_code}")
+
+            # Test GitHub API call for PRs
+            pr_response = await client.get(self.pr_url, headers=headers)
+            if pr_response.status_code == 200:
+                self.pr_response = pr_response.json()
+            else:
+                self.skipTest(f"GitHub PRs API call failed with status code {pr_response.status_code}")
+
+    async def test_github_api_call(self):
+        # Check if the GitHub API call was successful and set the response
+        self.assertIsNotNone(self.issue_response, "No issue response was fetched")
+        self.assertEqual(self.issue_response['state'], 'open', "Issue state should be open")
+
+    async def test_github_comments_call(self):
+        # Check if the GitHub comments API 
call was successful and set the response + self.assertIsNotNone(self.comments_response, "No comments response was fetched") + self.assertIsInstance(self.comments_response, list, "The comments response should be a list") + self.assertTrue(len(self.comments_response) >= 0, "The comments list should not be negative") + + async def test_github_prs_call(self): + # Check if the GitHub PRs API call was successful and set the response + self.assertIsNotNone(self.pr_response, "No PRs response was fetched") + self.assertIsInstance(self.pr_response, list, "The PRs response should be a list") + self.assertTrue(len(self.pr_response) >= 0, "The PRs list should not be negative") + + def test_define_issue_description_update(self): + # Ensure the response was set + self.assertIsNotNone(self.issue_response, "No issue response was fetched") + + # Call the function to test + issue_update = define_issue_description_update(self.issue_response) + + # Check if the function returns a non-empty result + self.assertIsInstance(issue_update, dict, "The result should be a dictionary") + self.assertTrue(len(issue_update) > 0, "The result should not be an empty dictionary") + + def test_define_pr_update(self): + # Ensure the response was set + self.assertIsNotNone(self.pr_response, "No pr response was fetched") + + # Call the function to test + pr_response = define_pr_update(self.pr_response[0],self.dmp_id) + + # Check if the function returns a non-empty result + self.assertIsInstance(pr_response, dict, "The result should be a dictionary") + self.assertTrue(len(pr_response) > 0, "The result should not be an empty dictionary") + + def test_define_issue_update(self): + # Ensure the response was set + self.assertIsNotNone(self.comments_response, "No pr response was fetched") + + # Call the function to test + issue_response = define_issue_update(self.comments_response[0],self.dmp_id) + + # Check if the function returns a non-empty result + self.assertIsInstance(issue_response, dict, "The result should be a dictionary") + self.assertTrue(len(issue_response) > 0, "The result should not be an empty dictionary") + + def test_get_dmp_issues(self): + # Fetch dmp issues from the database + dmp_tickets = self.db.get_dmp_issues() + self.assertTrue(len(dmp_tickets) > 0, "No dmp_tickets found") + + +if __name__ == '__main__': + unittest.main(testRunner=CustomTestRunner()) From 156b66d5dd165017331476896475b2c65da09a23 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 2 Aug 2024 12:05:38 +0530 Subject: [PATCH 02/32] supabase query changes --- app.py | 39 ++++--- db.py | 47 +++++++++ query.py | 262 +++++++++++++++++++++++++++++++++++++++++++++++ requirements.txt | 4 +- test.py | 7 +- utils.py | 16 +-- 6 files changed, 353 insertions(+), 22 deletions(-) create mode 100644 query.py diff --git a/app.py b/app.py index 39655c6..c436ab8 100644 --- a/app.py +++ b/app.py @@ -6,6 +6,7 @@ from apscheduler.schedulers.asyncio import AsyncIOScheduler from dotenv import load_dotenv from datetime import datetime +from query import PostgresQuery from utils import handle_week_data, parse_issue_description @@ -88,13 +89,13 @@ async def dmp_updates(): GITHUB_TOKEN = os.getenv('GITHUB_TOKEN') try: TARGET_DATE = os.getenv('TARGET_DATE') - db = SupabaseInterface().get_instance() # Loop through all dmp issues - dmp_tickets = db.get_dmp_issues() + dmp_tickets = PostgresQuery.get_all_dmp_issues() + for dmp in dmp_tickets: - dmp_id = dmp['id'] + dmp_id = dmp['id'] print('processing dmp ids ', dmp_id) issue_number = dmp['issue_number'] repo = dmp['repo'] @@ -125,8 
+126,10 @@ async def dmp_updates(): issue_update['contributor_username'] = dmp['contributor_username'] #get from db app.logger.info('Decription from remote: ', issue_update) - update_data = db.update_data( - issue_update, 'dmp_issues', 'id', dmp_id) + + update_data = PostgresQuery.update_data(issue_update, 'dmp_issues', 'id', dmp_id) + + print(f"dmp_issue update works - dmp_id {dmp_id}") if update_data else print(f"dmp_issue update failed - dmp_id {dmp_id}") app.logger.info(update_data) else: print('issue response ', issue_response) @@ -151,7 +154,7 @@ async def dmp_updates(): if comments_array == [] or len(comments_array)==0: break for val in comments_response.json(): - # Handle if any of the comments are week data + # Handle if any of the comments are week data plain_text_body = markdown2.markdown(val['body']) if "Weekly Goals" in plain_text_body and not week_update_status: week_update_status = handle_week_data(val, dmp['issue_url'], dmp_id, issue_update['mentor_username']) @@ -160,12 +163,16 @@ async def dmp_updates(): week_learning_status = handle_week_data(val, dmp['issue_url'], dmp_id, issue_update['mentor_username']) # Parse comments - comment_update = define_issue_update( - val, dmp_id=dmp_id) - app.logger.info( - 'Comment from remote: ', comment_update) - upsert_comments = db.upsert_data( - comment_update, 'dmp_issue_updates') + comment_update = define_issue_update(val, dmp_id=dmp_id) + app.logger.info('Comment from remote: ', comment_update) + + #get created_at + created_timestamp = PostgresQuery.get_timestamp('dmp_issue_updates','created_at','comment_id',comment_update['comment_id']) + comment_update['created_at'] = datetime.utcnow() if not created_timestamp else created_timestamp + comment_update['comment_updated_at'] = datetime.utcnow() + + upsert_comments = PostgresQuery.upsert_data(comment_update, 'dmp_issue_updates', 'comment_id') + print(f"dmp_issue_updates works dmp_id - {dmp_id}") if upsert_comments else print(f"comment failed dmp_id - {dmp_id}") app.logger.info(upsert_comments) else: print('issue response ', issue_response) @@ -188,8 +195,12 @@ async def dmp_updates(): pr_created_at = pr_val['created_at'] if (pr_created_at >= TARGET_DATE): pr_data = define_pr_update(pr_val, dmp_id) - upsert_pr = db.upsert_data( - pr_data, 'dmp_pr_updates') + + created_timestamp = PostgresQuery.get_timestamp('dmp_pr_updates','created_at','pr_id',pr_data['pr_id']) + pr_data['created_at'] = datetime.utcnow() if not created_timestamp else created_timestamp + + upsert_pr = PostgresQuery.upsert_data(pr_data, 'dmp_pr_updates', 'pr_id') + print(f"dmp_pr_updates works - dmp_id is {dmp_id}") if upsert_pr else print(f"dmp_pr_updates failed - dmp_id is {dmp_id}") app.logger.info(upsert_pr) else: print('issue response ', issue_response) diff --git a/db.py b/db.py index d42dbe8..8a402ee 100644 --- a/db.py +++ b/db.py @@ -4,6 +4,8 @@ from supabase.lib.client_options import ClientOptions from abc import ABC, abstractmethod from dotenv import load_dotenv +import psycopg2,json +from psycopg2.extras import RealDictCursor load_dotenv() @@ -36,6 +38,51 @@ def get_instance(): SupabaseInterface._instance = SupabaseInterface() return SupabaseInterface._instance + def get_postgres_connection(): + + # Database configuration + DB_HOST =os.getenv('POSTGRES_DB_HOST') + DB_NAME =os.getenv('POSTGRES_DB_NAME') + DB_USER =os.getenv('POSTGRES_DB_USER') + DB_PASS =os.getenv('POSTGRES_DB_PASS') + conn = psycopg2.connect( + host=DB_HOST, + database=DB_NAME, + user=DB_USER, + password=DB_PASS + ) + return conn + + def 
postgres_query(query,params=None): + try: + conn = SupabaseInterface.get_postgres_connection() + cursor = conn.cursor(cursor_factory=RealDictCursor) + + # cursor = conn.cursor() + if not params: + cursor.execute(query) + else: + cursor.execute(query,params) + + try: + rows = cursor.fetchall() + except Exception as e: + rows = [] #only for UPDATE method + + results_as_dicts = [dict(row) for row in rows] + + cursor.close() + conn.close() + return results_as_dicts + + except Exception as e: + print(e) + pass + + + + + def readAll(self, table): data = self.client.table(f"{table}").select("*").execute() diff --git a/query.py b/query.py new file mode 100644 index 0000000..81d7ae5 --- /dev/null +++ b/query.py @@ -0,0 +1,262 @@ +from db import SupabaseInterface + +class PostgresQuery: + + def get_issue_query(): + query = """ + SELECT + dmp_orgs.id AS org_id, + dmp_orgs.name AS org_name, + json_agg( + json_build_object( + 'id', dmp_issues.id, + 'name', dmp_issues.title + ) + ) AS issues + FROM + dmp_orgs + LEFT JOIN + dmp_issues + ON + dmp_orgs.id = dmp_issues.org_id + GROUP BY + dmp_orgs.id + ORDER BY + dmp_orgs.id; + """ + + data = SupabaseInterface.postgres_query(query) + return data + + def get_issue_owner(name): + query = """ + SELECT name, description + FROM dmp_orgs + WHERE name = %s; + """ + data = SupabaseInterface.postgres_query(query,(name,)) + return data + + def get_actual_owner_query(owner): + query = """ + SELECT id, name, repo_owner + FROM dmp_orgs + WHERE name LIKE %s; + """ + + data = SupabaseInterface.postgres_query(query,(f'%{owner}%',)) + return data + + + def get_dmp_issues(issue_id): + + query = """ + SELECT * FROM dmp_issues + WHERE id = %s; + """ + data = SupabaseInterface.postgres_query(query,(issue_id,)) + return data + + def get_all_dmp_issues(): + + query = """SELECT + dmp_issues.*, + json_build_object( + 'created_at', dmp_orgs.created_at, + 'description', dmp_orgs.description, + 'id', dmp_orgs.id, + 'link', dmp_orgs.link, + 'name', dmp_orgs.name, + 'repo_owner', dmp_orgs.repo_owner + ) AS dmp_orgs + FROM + dmp_issues + LEFT JOIN + dmp_orgs + ON + dmp_issues.org_id = dmp_orgs.id + WHERE + dmp_issues.org_id IS NOT NULL + ORDER BY + dmp_issues.id; + + + """ + + data = SupabaseInterface.postgres_query(query) + return data + + def get_dmp_issue_updates(dmp_issue_id): + + query = """ + SELECT * FROM dmp_issue_updates + WHERE dmp_id = %s; + """ + data = SupabaseInterface.postgres_query(query,(dmp_issue_id,)) + return data + + + def get_pr_data(dmp_issue_id): + + query = """ + SELECT * FROM dmp_pr_updates + WHERE dmp_id = %s; + """ + data = SupabaseInterface.postgres_query(query,(dmp_issue_id,)) + return data + + def postgres_query_insert(query, params=None): + try: + conn = SupabaseInterface.get_postgres_connection() + from psycopg2.extras import RealDictCursor + + cursor = conn.cursor(cursor_factory=RealDictCursor) + + if not params: + cursor.execute(query) + else: + cursor.execute(query, params) + + # Check if the query is an update/insert/delete or a select + if query.strip().lower().startswith("select"): + rows = cursor.fetchall() + results_as_dicts = [dict(row) for row in rows] + cursor.close() + conn.close() + return results_as_dicts + else: + # For update/insert/delete, commit the transaction and close cursor + conn.commit() + cursor.close() + conn.close() + return True + + except Exception as e: + print(f"An error occurred:postgres_query_insert {e}") + raise Exception + + + def update_data(data, table_name, match_column, match_value): + try: + # Construct the SQL 
query + set_clause = ", ".join([f"{key} = %s" for key in data.keys()]) + query = f"UPDATE {table_name} SET {set_clause} WHERE {match_column} = %s" + + # Values to update + values = list(data.values()) + values.append(match_value) + + # Execute the query using postgres_query + PostgresQuery.postgres_query_insert(query, values) + return True + + except Exception as e: + print(f"An error occurred:update_data {e}") + return None + + + + + def insert_data(data, table_name, match_column, match_value): + try: + # Construct the SQL query + set_clause = ", ".join([f"{key} = %s" for key in data.keys()]) + query = f"INSERT INTO {table_name} SET {set_clause} WHERE {match_column} = %s" + + # Values to update + values = list(data.values()) + values.append(match_value) + + # Execute the query using postgres_query + PostgresQuery.postgres_query_insert(query, values) + + return values + except Exception as e: + print(f"An error occurred:upsert_data {e}") + return None + + def upsert_data(data, table_name, conflict_column): + try: + # Construct the SQL query for UPSERT + columns = ', '.join(data.keys()) + placeholders = ', '.join(['%s'] * len(data)) + updates = ', '.join([f"{key} = EXCLUDED.{key}" for key in data.keys()]) + + query = f""" + INSERT INTO {table_name} ({columns}) + VALUES ({placeholders}) + ON CONFLICT ({conflict_column}) + DO UPDATE SET {updates} + """ + + # Values to insert or update + values = list(data.values()) + + # Execute the query using postgres_query + PostgresQuery.postgres_query_insert(query, values) + return values + + except Exception as e: + print(f"An error occurred:upsert_data {e}") + return None + + + def get_timestamp(table_name, col_name, col, value): + try: + query = f""" + SELECT {col_name} FROM {table_name} + WHERE {col} = %s; + """ + data = SupabaseInterface.postgres_query(query, (value,)) + + if data: + return data[0][col_name] + else: + return None + + except Exception as e: + print(f"An error occurred:get_timestamp {e}") + return None + + def check_week_exist(dmp_id, week): + try: + query = """ + SELECT * FROM dmp_week_updates + WHERE dmp_id = %s AND week = %s; + """ + data = SupabaseInterface.postgres_query(query, (dmp_id, week)) + + if data: + return data + else: + return None + + except Exception as e: + print(f"An error occurred:check_week_exist {e}") + return None + + def multiple_update_data(data, table_name, match_columns, match_values): + try: + # Construct the SET clause + set_clause = ", ".join([f"{key} = %s" for key in data.keys()]) + + # Construct the WHERE clause for multiple conditions + where_clause = " AND ".join([f"{col} = %s" for col in match_columns]) + + # Combine the clauses into the final query + query = f""" + UPDATE {table_name} + SET {set_clause} + WHERE {where_clause} + """ + + # Values to update followed by the match values + values = list(data.values()) + match_values + + # Execute the query using postgres_query + val = PostgresQuery.postgres_query_insert(query, values) + return val + + except Exception as e: + print(f"An error occurred:multiple_update_data {e}") + raise Exception diff --git a/requirements.txt b/requirements.txt index 1068de3..28bb127 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,4 +4,6 @@ python-dotenv==1.0.1 Quart==0.19.5 supabase==2.4.5 requests==2.32.3 -markdown2==2.4.13 \ No newline at end of file +markdown2==2.4.13 +postgrest==0.16.4 +psycopg2-binary==2.9.9 \ No newline at end of file diff --git a/test.py b/test.py index e743409..9d6465a 100644 --- a/test.py +++ b/test.py @@ -15,9 +15,14 @@ def 
addSuccess(self, test):
         super().addSuccess(test)
         print(f"{test._testMethodName} - passed")
 
-
 class CustomTestRunner(unittest.TextTestRunner):
     resultclass = CustomTestResult
+    
+    def run(self, test):
+        result = super().run(test)
+        if result.wasSuccessful():
+            print("All Testcases Passed")
+        return result
 
 class TestDMPUpdates(unittest.IsolatedAsyncioTestCase):
 
diff --git a/utils.py b/utils.py
index 3a57150..0b86175 100644
--- a/utils.py
+++ b/utils.py
@@ -3,7 +3,7 @@
 import logging
 import markdown2
 from db import SupabaseInterface
-
+from query import PostgresQuery
 
 def parse_issue_description(issue_body):
     # Description is everything before goals.
@@ -92,18 +92,22 @@ def handle_week_data(comment, issue_url, dmp_id, mentor_name):
 
             exist = db.client.table('dmp_week_updates').select(
                 "*").eq('dmp_id', week_json['dmp_id']).eq('week', week_json['week']).execute()
+            
+            exist = PostgresQuery.check_week_exist(week_json['dmp_id'],week_json['week'])
+            
+            if not exist:
+                # add_data = db.add_data(week_json, 'dmp_week_updates')
+                add_data = PostgresQuery.upsert_data(week_json, 'dmp_week_updates', 'dmp_id')
 
-            if not exist.data:
-                add_data = db.add_data(week_json, 'dmp_week_updates')
             else:
-                update_data = db.multiple_update_data(week_json, 'dmp_week_updates', [
-                    'dmp_id', 'week'], [week_json['dmp_id'], week_json['week']])
+                # update_data = db.multiple_update_data(week_json, 'dmp_week_updates', ['dmp_id', 'week'], [week_json['dmp_id'], week_json['week']])
+                update_data = PostgresQuery.multiple_update_data(week_json, 'dmp_week_updates', ['dmp_id', 'week'], [week_json['dmp_id'], week_json['week']])
 
             week_json = {}
 
         return True
     except Exception as e:
-        print(e)
+        print(f"Error in week data updates {dmp_id}")
         logging.info(f"{e} - find_week_data")
         return False

From a695966c5c21d8f35f5364055355156c590539ae Mon Sep 17 00:00:00 2001
From: sasi
Date: Fri, 9 Aug 2024 13:41:28 +0530
Subject: [PATCH 03/32] ORM conversion of all queries
---
 app.py           |  48 ++++++----
 models.py        | 142 +++++++++++++++++++++++++++
 query.py         | 230 +++++++++++++++++++++++++++++++++++++++++++++++
 requirements.txt |   3 +-
 utils.py         |  16 ++--
 5 files changed, 415 insertions(+), 24 deletions(-)
 create mode 100644 models.py

diff --git a/app.py b/app.py
index c436ab8..be506d5 100644
--- a/app.py
+++ b/app.py
@@ -1,21 +1,34 @@
 # app.py
 from quart import Quart
-import httpx
-import os,markdown2
+import os,markdown2,httpx
 from db import SupabaseInterface
 from apscheduler.schedulers.asyncio import AsyncIOScheduler
 from dotenv import load_dotenv
+from datetime import datetime,timezone
+from query import PostgresQuery,PostgresORM
+from utils import handle_week_data, parse_issue_description
+from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
+from sqlalchemy.orm import sessionmaker
 from datetime import datetime
-from query import PostgresQuery
+from models import *
+
+
-from utils import handle_week_data, parse_issue_description
 
 # Load environment variables from .env file
 load_dotenv()
 delay_mins: str = os.getenv("SCHEDULER_DELAY_IN_MINS")
+
 app = Quart(__name__)
+# Initialize Quart app
+app.config['SQLALCHEMY_DATABASE_URI'] = PostgresORM.get_postgres_uri()
+
+# Initialize Async SQLAlchemy
+engine = create_async_engine(app.config['SQLALCHEMY_DATABASE_URI'], echo=False)
+async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession)
+
 scheduler = AsyncIOScheduler()
 
@@ -82,6 +95,8 @@ def define_pr_update(pr_val, dmp_id):
 
     return {}
 
+
+
 @app.route('/dmp_updates')
 async def dmp_updates():
     print(
@@ -91,9 +106,8 @@ async 
def dmp_updates(): TARGET_DATE = os.getenv('TARGET_DATE') # Loop through all dmp issues - dmp_tickets = PostgresQuery.get_all_dmp_issues() - - + dmp_tickets = await PostgresORM.get_all_dmp_issues(async_session) + for dmp in dmp_tickets: dmp_id = dmp['id'] print('processing dmp ids ', dmp_id) @@ -127,8 +141,8 @@ async def dmp_updates(): app.logger.info('Decription from remote: ', issue_update) - update_data = PostgresQuery.update_data(issue_update, 'dmp_issues', 'id', dmp_id) - + update_data = await PostgresORM.update_dmp_issue(async_session,issue_id=dmp_id, update_data=issue_update) + print(f"dmp_issue update works - dmp_id {dmp_id}") if update_data else print(f"dmp_issue update failed - dmp_id {dmp_id}") app.logger.info(update_data) else: @@ -157,10 +171,10 @@ async def dmp_updates(): # Handle if any of the comments are week data plain_text_body = markdown2.markdown(val['body']) if "Weekly Goals" in plain_text_body and not week_update_status: - week_update_status = handle_week_data(val, dmp['issue_url'], dmp_id, issue_update['mentor_username']) + week_update_status = await handle_week_data(val, dmp['issue_url'], dmp_id, issue_update['mentor_username'],async_session) if "Weekly Learnings" in plain_text_body and not week_learning_status: - week_learning_status = handle_week_data(val, dmp['issue_url'], dmp_id, issue_update['mentor_username']) + week_learning_status = await handle_week_data(val, dmp['issue_url'], dmp_id, issue_update['mentor_username'],async_session) # Parse comments comment_update = define_issue_update(val, dmp_id=dmp_id) @@ -169,9 +183,11 @@ async def dmp_updates(): #get created_at created_timestamp = PostgresQuery.get_timestamp('dmp_issue_updates','created_at','comment_id',comment_update['comment_id']) comment_update['created_at'] = datetime.utcnow() if not created_timestamp else created_timestamp - comment_update['comment_updated_at'] = datetime.utcnow() - - upsert_comments = PostgresQuery.upsert_data(comment_update, 'dmp_issue_updates', 'comment_id') + comment_update['comment_updated_at'] = datetime.utcnow().replace(tzinfo=None) + comment_update['created_at'] = comment_update['created_at'].replace(tzinfo=None) + + upsert_comments = await PostgresORM.upsert_data_orm(async_session,comment_update) + print(f"dmp_issue_updates works dmp_id - {dmp_id}") if upsert_comments else print(f"comment failed dmp_id - {dmp_id}") app.logger.info(upsert_comments) else: @@ -198,8 +214,10 @@ async def dmp_updates(): created_timestamp = PostgresQuery.get_timestamp('dmp_pr_updates','created_at','pr_id',pr_data['pr_id']) pr_data['created_at'] = datetime.utcnow() if not created_timestamp else created_timestamp + pr_data['created_at'] = pr_data['created_at'].replace(tzinfo=None) + + upsert_pr = await PostgresORM.upsert_pr_update(async_session,pr_data) - upsert_pr = PostgresQuery.upsert_data(pr_data, 'dmp_pr_updates', 'pr_id') print(f"dmp_pr_updates works - dmp_id is {dmp_id}") if upsert_pr else print(f"dmp_pr_updates failed - dmp_id is {dmp_id}") app.logger.info(upsert_pr) else: diff --git a/models.py b/models.py new file mode 100644 index 0000000..951571b --- /dev/null +++ b/models.py @@ -0,0 +1,142 @@ +from datetime import datetime +from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, func,BigInteger +from sqlalchemy.orm import relationship +from sqlalchemy.ext.declarative import declarative_base + + +Base = declarative_base() + + + +# Define your models +class DmpIssue(Base): + __tablename__ = 'dmp_issues' + + id = Column(Integer, primary_key=True, autoincrement=True) + 
issue_url = Column(String, nullable=False) + issue_number = Column(Integer, nullable=False) + mentor_username = Column(String, nullable=True) + contributor_username = Column(String, nullable=True) + title = Column(String, nullable=False) + org_id = Column(Integer, ForeignKey('dmp_orgs.id'), nullable=False) + description = Column(Text, nullable=True) + repo = Column(String, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'issue_url': self.issue_url, + 'issue_number': self.issue_number, + 'mentor_username': self.mentor_username, + 'contributor_username': self.contributor_username, + 'title': self.title, + 'org_id': self.org_id, + 'description': self.description, + 'repo': self.repo + } + +class DmpOrg(Base): + __tablename__ = 'dmp_orgs' + + id = Column(Integer, primary_key=True, autoincrement=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + name = Column(String, nullable=False) + description = Column(Text, nullable=True) + link = Column(String, nullable=False) + repo_owner = Column(String, nullable=False) + dmp_issues = relationship('DmpIssue', backref='organization', lazy=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'created_at': self.created_at.isoformat(), + 'name': self.name, + 'description': self.description, + 'link': self.link, + 'repo_owner': self.repo_owner + } + + +class DmpIssueUpdate(Base): + __tablename__ = 'dmp_issue_updates' + + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + body_text = Column(Text, nullable=False) + comment_link = Column(String, nullable=False) + comment_id = Column(BigInteger, primary_key=True, nullable=False) + comment_api = Column(String, nullable=False) + comment_updated_at = Column(DateTime, nullable=False) + dmp_id = Column(Integer, ForeignKey('dmp_orgs.id'), nullable=False) + created_by = Column(String, nullable=False) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'created_at': self.created_at.isoformat(), + 'body_text': self.body_text, + 'comment_link': self.comment_link, + 'comment_id': self.comment_id, + 'comment_api': self.comment_api, + 'comment_updated_at': self.comment_updated_at.isoformat(), + 'dmp_id': self.dmp_id, + 'created_by': self.created_by + } + +class Prupdates(Base): + __tablename__ = 'dmp_pr_updates' + + created_at = Column(DateTime, nullable=False, default=datetime.utcnow) + pr_id = Column(Integer, nullable=False, primary_key=True) + status = Column(String, nullable=False) + title = Column(String, nullable=False) + pr_updated_at = Column(DateTime, nullable=False, default=datetime.utcnow) + merged_at = Column(DateTime) + closed_at = Column(DateTime) + dmp_id = Column(Integer, ForeignKey('dmp_issues.id'), nullable=False) + link = Column(String, nullable=False) + + def __repr__(self): + return f'' + + def to_dict(self): + return { + 'created_at': self.created_at.isoformat(), + 'pr_id': self.pr_id, + 'status': self.status, + 'title': self.title, + 'pr_updated_at': self.pr_updated_at.isoformat(), + 'merged_at': self.merged_at.isoformat() if self.merged_at else None, + 'closed_at': self.closed_at.isoformat() if self.closed_at else None, + 'dmp_id': self.dmp_id, + 'link': self.link + } + +class DmpWeekUpdate(Base): + __tablename__ = 'dmp_week_updates' + + id = Column(Integer, primary_key=True, autoincrement=True) + issue_url = Column(String, nullable=False) + week = Column(Integer, nullable=False) + total_task = Column(Integer, nullable=False) + 
completed_task = Column(Integer, nullable=False) + progress = Column(Integer, nullable=False) + task_data = Column(Text, nullable=False) + dmp_id = Column(Integer, nullable=False) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'week': self.week, + 'dmp_id': self.dmp_id, + } diff --git a/query.py b/query.py index 81d7ae5..76e2ade 100644 --- a/query.py +++ b/query.py @@ -1,4 +1,15 @@ from db import SupabaseInterface +from sqlalchemy.future import select +from sqlalchemy.orm import Session +from sqlalchemy.exc import NoResultFound + +from models import * +from sqlalchemy import update +# from app import async_session +from sqlalchemy.dialects.postgresql import insert +from datetime import datetime, timezone, timedelta +from sqlalchemy.orm import aliased +import os class PostgresQuery: @@ -234,6 +245,7 @@ def check_week_exist(dmp_id, week): except Exception as e: print(f"An error occurred:check_week_exist {e}") return None + def multiple_update_data(data, table_name, match_columns, match_values): try: @@ -260,3 +272,221 @@ def multiple_update_data(data, table_name, match_columns, match_values): except Exception as e: print(f"An error occurred:multiple_update_data {e}") raise Exception + + +class PostgresORM: + + def get_postgres_uri(): + DB_HOST = os.getenv('POSTGRES_DB_HOST') + DB_NAME = os.getenv('POSTGRES_DB_NAME') + DB_USER = os.getenv('POSTGRES_DB_USER') + DB_PASS = os.getenv('POSTGRES_DB_PASS') + + return f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}' + + async def get_all_dmp_issues(async_session): + try: + async with async_session() as session: + # Alias for the DmpOrg table to use in the JSON_BUILD_OBJECT + dmp_org_alias = aliased(DmpOrg) + + # Build the query + query = ( + select( + DmpIssue, + func.json_build_object( + 'created_at', dmp_org_alias.created_at, + 'description', dmp_org_alias.description, + 'id', dmp_org_alias.id, + 'link', dmp_org_alias.link, + 'name', dmp_org_alias.name, + 'repo_owner', dmp_org_alias.repo_owner + ).label('dmp_orgs') + ) + .outerjoin(dmp_org_alias, DmpIssue.org_id == dmp_org_alias.id) + .filter(DmpIssue.org_id.isnot(None)) + .order_by(DmpIssue.id) + ) + + # Execute the query and fetch results + result = await session.execute(query) + rows = result.fetchall() + + # Convert results to dictionaries + data = [] + for row in rows: + issue_dict = row._asdict() # Convert row to dict + dmp_orgs = issue_dict.pop('dmp_orgs') # Extract JSON object from row + issue_dict['dmp_orgs'] = dmp_orgs + issue_dict.update(issue_dict['DmpIssue'].to_dict()) + # Add JSON object back to dict + del issue_dict['DmpIssue'] + data.append(issue_dict) + + return data + + except Exception as e: + print(e) + raise Exception + + async def update_dmp_issue(async_session,issue_id: int, update_data: dict): + try: + async with async_session() as session: + async with session.begin(): + # Build the update query + query = ( + update(DmpIssue) + .where(DmpIssue.id == issue_id) + .values(**update_data) + ) + + # Execute the query + await session.execute(query) + await session.commit() + return True + + except Exception as e: + return False + + + async def upsert_data_orm(async_session, update_data): + try: + + async with async_session() as session: + async with session.begin(): + + # Define the insert statement + stmt = insert(DmpIssueUpdate).values(**update_data) + + # Define the update statement in case of conflict + stmt = stmt.on_conflict_do_update( + index_elements=['comment_id'], + set_={ + 'body_text': stmt.excluded.body_text, + 
'comment_link': stmt.excluded.comment_link, + 'comment_api': stmt.excluded.comment_api, + 'comment_updated_at': stmt.excluded.comment_updated_at, + 'dmp_id': stmt.excluded.dmp_id, + 'created_by': stmt.excluded.created_by, + 'created_at': stmt.excluded.created_at + } + ) + + # Execute the statement + await session.execute(stmt) + await session.commit() + + return True + + except Exception as e: + print(e) + return False + + + + async def upsert_pr_update(async_session, pr_update_data): + try: + async with async_session() as session: + async with session.begin(): + pr_update_data['pr_updated_at'] = datetime.fromisoformat(pr_update_data['pr_updated_at']).replace(tzinfo=None) if pr_update_data['pr_updated_at'] else None + pr_update_data['merged_at'] = datetime.fromisoformat(pr_update_data['merged_at']).replace(tzinfo=None) if pr_update_data['merged_at'] else None + pr_update_data['closed_at'] = datetime.fromisoformat(pr_update_data['closed_at']).replace(tzinfo=None) if pr_update_data['closed_at'] else None + + # Prepare the insert statement + stmt = insert(Prupdates).values(**pr_update_data) + + # Prepare the conflict resolution strategy + stmt = stmt.on_conflict_do_update( + index_elements=['pr_id'], # Assuming `pr_id` is the unique key + set_={ + 'status': stmt.excluded.status, + 'merged_at': stmt.excluded.merged_at, + 'closed_at': stmt.excluded.closed_at, + 'pr_updated_at': stmt.excluded.pr_updated_at, + 'dmp_id': stmt.excluded.dmp_id, + 'created_at': stmt.excluded.created_at, + 'title': stmt.excluded.title, + 'link': stmt.excluded.link + } + ) + # Execute and commit the transaction + await session.execute(stmt) + await session.commit() + + return True + + except Exception as e: + print(e) + return False + + + + async def update_dmp_week_update(async_session, update_data): + try: + async with async_session() as session: + async with session.begin(): + # Define the filter conditions + stmt = ( + select(DmpWeekUpdate) + .where( + DmpWeekUpdate.week == update_data['week'], + DmpWeekUpdate.dmp_id == update_data['dmp_id'] + ) + ) + + # Fetch the row that needs to be updated + result = await session.execute(stmt) + dmp_week_update = result.scalars().first() + + if dmp_week_update: + # Update the fields with the values from update_data + for key, value in update_data.items(): + setattr(dmp_week_update, key, value) + + # Commit the changes + await session.commit() + return True + except Exception as e: + print(e) + return False + + + + async def get_week_updates(async_session, dmp_id, week): + try: + async with async_session() as session: + # Build the ORM query + stmt = select(DmpWeekUpdate).where( + DmpWeekUpdate.dmp_id == dmp_id, + DmpWeekUpdate.week == week + ) + # Execute the query + result = await session.execute(stmt) + + # Fetch all matching rows + week_updates = result.scalars().all() + + + return True if len(week_updates)>0 else False + + except Exception as e: + return False + + + + async def insert_dmp_week_update(async_session, update_data): + try: + async with async_session() as session: + async with session.begin(): + # Define the insert statement + stmt = insert(DmpWeekUpdate).values(**update_data) + + # Execute the statement + await session.execute(stmt) + await session.commit() + + return True + + except Exception as e: + print(e) + return False \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 28bb127..6b1ecb2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,4 +6,5 @@ supabase==2.4.5 requests==2.32.3 markdown2==2.4.13 postgrest==0.16.4 
-psycopg2-binary==2.9.9 \ No newline at end of file +psycopg2-binary==2.9.9 +SQLAlchemy==2.0.32 diff --git a/utils.py b/utils.py index 0b86175..50b864c 100644 --- a/utils.py +++ b/utils.py @@ -3,7 +3,7 @@ import logging import markdown2 from db import SupabaseInterface -from query import PostgresQuery +from query import PostgresQuery,PostgresORM def parse_issue_description(issue_body): # Description is everything before goals. @@ -33,7 +33,7 @@ def parse_issue_description(issue_body): } -def handle_week_data(comment, issue_url, dmp_id, mentor_name): +async def handle_week_data(comment, issue_url, dmp_id, mentor_name,async_session): try: # Get writer of comment and if it is not the selected mentor, return right away # writter = "@"+comment['user']['login'] @@ -93,15 +93,15 @@ def handle_week_data(comment, issue_url, dmp_id, mentor_name): exist = db.client.table('dmp_week_updates').select( "*").eq('dmp_id', week_json['dmp_id']).eq('week', week_json['week']).execute() - exist = PostgresQuery.check_week_exist(week_json['dmp_id'],week_json['week']) + + exist = await PostgresORM.get_week_updates(async_session,week_json['dmp_id'],week_json['week']) if not exist: - # add_data = db.add_data(week_json, 'dmp_week_updates') - add_data = PostgresQuery.upsert_data(week_json, 'dmp_week_updates', 'dmp_id') - + add_data = await PostgresORM.insert_dmp_week_update(async_session,week_json) + print(f"Week data added {week_json['dmp_id']}-{week_json['week']}") if add_data else None else: - # update_data = db.multiple_update_data(week_json, 'dmp_week_updates', ['dmp_id', 'week'], [week_json['dmp_id'], week_json['week']]) - update_data = PostgresQuery.multiple_update_data(week_json, 'dmp_week_updates', ['dmp_id', 'week'], [week_json['dmp_id'], week_json['week']]) + update_data = await PostgresORM.update_dmp_week_update(async_session,week_json) + print(f"Week data updated {week_json['dmp_id']}-{week_json['week']}") if update_data else None week_json = {} From cfab860cad9f960bb126001d8caad298f6e4be6d Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 9 Aug 2024 16:37:30 +0530 Subject: [PATCH 04/32] code remove - supabase --- app.py | 4 +- db.py | 124 ------------------------------------------------------- query.py | 70 ++++++++++++++++++++++++------- test.py | 94 +++++++++++++++++++++++++++++------------ utils.py | 5 --- 5 files changed, 125 insertions(+), 172 deletions(-) diff --git a/app.py b/app.py index be506d5..52ea897 100644 --- a/app.py +++ b/app.py @@ -1,7 +1,6 @@ # app.py from quart import Quart import os,markdown2,httpx -from db import SupabaseInterface from apscheduler.schedulers.asyncio import AsyncIOScheduler from dotenv import load_dotenv from datetime import datetime,timezone @@ -11,6 +10,7 @@ from sqlalchemy.orm import sessionmaker from datetime import datetime from models import * +from sqlalchemy.pool import NullPool @@ -26,7 +26,7 @@ app.config['SQLALCHEMY_DATABASE_URI'] = PostgresORM.get_postgres_uri() # Initialize Async SQLAlchemy -engine = create_async_engine(app.config['SQLALCHEMY_DATABASE_URI'], echo=False) +engine = create_async_engine(app.config['SQLALCHEMY_DATABASE_URI'], echo=False,poolclass=NullPool) async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession) scheduler = AsyncIOScheduler() diff --git a/db.py b/db.py index 8a402ee..e69de29 100644 --- a/db.py +++ b/db.py @@ -1,124 +0,0 @@ -import os, sys -from typing import Any -from supabase import create_client, Client -from supabase.lib.client_options import ClientOptions -from abc import ABC, 
abstractmethod -from dotenv import load_dotenv -import psycopg2,json -from psycopg2.extras import RealDictCursor - -load_dotenv() - -client_options = ClientOptions(postgrest_client_timeout=None) - -url: str = os.getenv("SUPABASE_URL") -key: str = os.getenv("SUPABASE_KEY") - -class SupabaseInterface(): - - _instance = None - - def __init__(self): - # Initialize Supabase client upon first instantiation - if not SupabaseInterface._instance: - self.supabase_url =url - self.supabase_key =key - self.client: Client = create_client(self.supabase_url, self.supabase_key, options=client_options) - SupabaseInterface._instance = self - else: - SupabaseInterface._instance = self._instance - - - - @staticmethod - def get_instance(): - # Static method to retrieve the singleton instance - if not SupabaseInterface._instance: - # If no instance exists, create a new one - SupabaseInterface._instance = SupabaseInterface() - return SupabaseInterface._instance - - def get_postgres_connection(): - - # Database configuration - DB_HOST =os.getenv('POSTGRES_DB_HOST') - DB_NAME =os.getenv('POSTGRES_DB_NAME') - DB_USER =os.getenv('POSTGRES_DB_USER') - DB_PASS =os.getenv('POSTGRES_DB_PASS') - conn = psycopg2.connect( - host=DB_HOST, - database=DB_NAME, - user=DB_USER, - password=DB_PASS - ) - return conn - - def postgres_query(query,params=None): - try: - conn = SupabaseInterface.get_postgres_connection() - cursor = conn.cursor(cursor_factory=RealDictCursor) - - # cursor = conn.cursor() - if not params: - cursor.execute(query) - else: - cursor.execute(query,params) - - try: - rows = cursor.fetchall() - except Exception as e: - rows = [] #only for UPDATE method - - results_as_dicts = [dict(row) for row in rows] - - cursor.close() - conn.close() - return results_as_dicts - - except Exception as e: - print(e) - pass - - - - - - - def readAll(self, table): - data = self.client.table(f"{table}").select("*").execute() - return data.data - - def add_data(self, data,table_name): - data = self.client.table(table_name).insert(data).execute() - return data.data - - def update_data(self,data,table_name, match_column, match_value): - response = self.client.table(table_name).update(data).eq(match_column, match_value).execute() - return response.data - - def multiple_update_data(self,data,table_name, match_column, match_value): - response = self.client.table(table_name).update(data).eq(match_column[0], match_value[0]).eq(match_column[1], match_value[1]).execute() - return response.data - - def upsert_data(self,data,table_name): - response = self.client.table(table_name).upsert(data).execute() - return response.data - - def add_data_filter(self, data, table_name): - # Construct the filter based on the provided column names and values - filter_data = {column: data[column] for column in ['dmp_id','issue_number','owner']} - - # Check if the data already exists in the table based on the filter - existing_data = self.client.table(table_name).select("*").eq('dmp_id',data['dmp_id']).execute() - - # If the data already exists, return without creating a new record - if existing_data.data: - return "Data already exists" - - # If the data doesn't exist, insert it into the table - new_data = self.client.table(table_name).insert(data).execute() - return new_data.data - - def get_dmp_issues(self): - response = self.client.table('dmp_issues').select('*, dmp_orgs(*)').execute() - return response.data diff --git a/query.py b/query.py index 76e2ade..d281b7d 100644 --- a/query.py +++ b/query.py @@ -1,8 +1,5 @@ -from db import SupabaseInterface from 
sqlalchemy.future import select -from sqlalchemy.orm import Session -from sqlalchemy.exc import NoResultFound - +import psycopg2 from models import * from sqlalchemy import update # from app import async_session @@ -10,8 +7,49 @@ from datetime import datetime, timezone, timedelta from sqlalchemy.orm import aliased import os +from psycopg2.extras import RealDictCursor class PostgresQuery: + def get_postgres_connection(): + + # Database configuration + DB_HOST =os.getenv('POSTGRES_DB_HOST') + DB_NAME =os.getenv('POSTGRES_DB_NAME') + DB_USER =os.getenv('POSTGRES_DB_USER') + DB_PASS =os.getenv('POSTGRES_DB_PASS') + conn = psycopg2.connect( + host=DB_HOST, + database=DB_NAME, + user=DB_USER, + password=DB_PASS + ) + return conn + + def postgres_query(query,params=None): + try: + conn = PostgresQuery.get_postgres_connection() + cursor = conn.cursor(cursor_factory=RealDictCursor) + + # cursor = conn.cursor() + if not params: + cursor.execute(query) + else: + cursor.execute(query,params) + + try: + rows = cursor.fetchall() + except Exception as e: + rows = [] #only for UPDATE method + + results_as_dicts = [dict(row) for row in rows] + + cursor.close() + conn.close() + return results_as_dicts + + except Exception as e: + print(e) + pass def get_issue_query(): query = """ @@ -36,7 +74,7 @@ def get_issue_query(): dmp_orgs.id; """ - data = SupabaseInterface.postgres_query(query) + data = PostgresQuery.postgres_query(query) return data def get_issue_owner(name): @@ -45,7 +83,7 @@ def get_issue_owner(name): FROM dmp_orgs WHERE name = %s; """ - data = SupabaseInterface.postgres_query(query,(name,)) + data = PostgresQuery.postgres_query(query)(query,(name,)) return data def get_actual_owner_query(owner): @@ -55,7 +93,7 @@ def get_actual_owner_query(owner): WHERE name LIKE %s; """ - data = SupabaseInterface.postgres_query(query,(f'%{owner}%',)) + data = PostgresQuery.postgres_query(query)(query,(f'%{owner}%',)) return data @@ -65,7 +103,7 @@ def get_dmp_issues(issue_id): SELECT * FROM dmp_issues WHERE id = %s; """ - data = SupabaseInterface.postgres_query(query,(issue_id,)) + data = PostgresQuery.postgres_query(query)(query,(issue_id,)) return data def get_all_dmp_issues(): @@ -94,7 +132,7 @@ def get_all_dmp_issues(): """ - data = SupabaseInterface.postgres_query(query) + data = PostgresQuery.postgres_query(query)(query) return data def get_dmp_issue_updates(dmp_issue_id): @@ -103,7 +141,7 @@ def get_dmp_issue_updates(dmp_issue_id): SELECT * FROM dmp_issue_updates WHERE dmp_id = %s; """ - data = SupabaseInterface.postgres_query(query,(dmp_issue_id,)) + data = PostgresQuery.postgres_query(query)(query,(dmp_issue_id,)) return data @@ -113,12 +151,12 @@ def get_pr_data(dmp_issue_id): SELECT * FROM dmp_pr_updates WHERE dmp_id = %s; """ - data = SupabaseInterface.postgres_query(query,(dmp_issue_id,)) + data = PostgresQuery.postgres_query(query)(query,(dmp_issue_id,)) return data def postgres_query_insert(query, params=None): try: - conn = SupabaseInterface.get_postgres_connection() + conn = PostgresQuery.get_postgres_connection() from psycopg2.extras import RealDictCursor cursor = conn.cursor(cursor_factory=RealDictCursor) @@ -218,7 +256,7 @@ def get_timestamp(table_name, col_name, col, value): SELECT {col_name} FROM {table_name} WHERE {col} = %s; """ - data = SupabaseInterface.postgres_query(query, (value,)) + data = PostgresQuery.postgres_query(query)(query, (value,)) if data: return data[0][col_name] @@ -235,7 +273,7 @@ def check_week_exist(dmp_id, week): SELECT * FROM dmp_week_updates WHERE dmp_id = %s AND 
week = %s; """ - data = SupabaseInterface.postgres_query(query, (dmp_id, week)) + data = PostgresQuery.postgres_query(query)(query, (dmp_id, week)) if data: return data @@ -489,4 +527,6 @@ async def insert_dmp_week_update(async_session, update_data): except Exception as e: print(e) - return False \ No newline at end of file + return False + + diff --git a/test.py b/test.py index 9d6465a..0d2dfd3 100644 --- a/test.py +++ b/test.py @@ -1,8 +1,11 @@ -import httpx,logging,os,unittest +import httpx, logging, os, unittest,random from app import app from db import SupabaseInterface -from app import define_issue_description_update ,define_pr_update,define_issue_update - +from app import define_issue_description_update, define_pr_update, define_issue_update, async_session +from query import PostgresORM +from sqlalchemy.orm import aliased +from sqlalchemy.future import select +from models import * # Suppress asyncio debug messages logging.getLogger('asyncio').setLevel(logging.CRITICAL) @@ -34,13 +37,49 @@ async def asyncSetUp(self): self.comments_response = None self.pr_response = None - # Fetch dmp issues from the database - dmp_tickets = self.db.get_dmp_issues() + # CHANGE BELOW DB CALL WHEN CHANGES MADE IN PostgresORM.get_all_dmp_issues() + async with async_session() as session: + # Alias for the DmpOrg table to use in the JSON_BUILD_OBJECT + dmp_org_alias = aliased(DmpOrg) + + # Build the query + query = ( + select( + DmpIssue, + func.json_build_object( + 'created_at', dmp_org_alias.created_at, + 'description', dmp_org_alias.description, + 'id', dmp_org_alias.id, + 'link', dmp_org_alias.link, + 'name', dmp_org_alias.name, + 'repo_owner', dmp_org_alias.repo_owner + ).label('dmp_orgs') + ) + .outerjoin(dmp_org_alias, DmpIssue.org_id == dmp_org_alias.id) + .filter(DmpIssue.org_id.isnot(None)) + .order_by(DmpIssue.id) + ) + + # Execute the query and fetch results + result = await session.execute(query) + rows = result.fetchall() + + # Convert results to dictionaries + data = [] + for row in rows: + issue_dict = row._asdict() # Convert row to dict + dmp_orgs = issue_dict.pop('dmp_orgs') # Extract JSON object from row + issue_dict['dmp_orgs'] = dmp_orgs + issue_dict.update(issue_dict['DmpIssue'].to_dict()) + # Add JSON object back to dict + del issue_dict['DmpIssue'] + data.append(issue_dict) + + dmp_tickets = data if not dmp_tickets: self.skipTest("No dmp_tickets found") - - # Use the first dmp ticket to form the URL - dmp = dmp_tickets[0] + + dmp = dmp_tickets[random.randint(0,len(dmp_tickets)-1)] self.dmp_id = dmp['id'] self.issue_number = dmp['issue_number'] self.repo = dmp['repo'] @@ -80,55 +119,58 @@ async def asyncSetUp(self): self.skipTest(f"GitHub PRs API call failed with status code {pr_response.status_code}") async def test_github_api_call(self): - # Check if the GitHub API call was successful and set the response self.assertIsNotNone(self.issue_response, "No issue response was fetched") self.assertEqual(self.issue_response['state'], 'open', "Issue state should be open") async def test_github_comments_call(self): - # Check if the GitHub comments API call was successful and set the response self.assertIsNotNone(self.comments_response, "No comments response was fetched") self.assertIsInstance(self.comments_response, list, "The comments response should be a list") self.assertTrue(len(self.comments_response) >= 0, "The comments list should not be negative") async def test_github_prs_call(self): - # Check if the GitHub PRs API call was successful and set the response 
self.assertIsNotNone(self.pr_response, "No PRs response was fetched") self.assertIsInstance(self.pr_response, list, "The PRs response should be a list") self.assertTrue(len(self.pr_response) >= 0, "The PRs list should not be negative") def test_define_issue_description_update(self): - # Ensure the response was set self.assertIsNotNone(self.issue_response, "No issue response was fetched") - - # Call the function to test issue_update = define_issue_description_update(self.issue_response) - - # Check if the function returns a non-empty result self.assertIsInstance(issue_update, dict, "The result should be a dictionary") self.assertTrue(len(issue_update) > 0, "The result should not be an empty dictionary") - + def test_define_pr_update(self): - # Ensure the response was set self.assertIsNotNone(self.pr_response, "No pr response was fetched") - - # Call the function to test - pr_response = define_pr_update(self.pr_response[0],self.dmp_id) - - # Check if the function returns a non-empty result + if self.pr_response==[]: + self.skipTest(f"No data for PR") + pr_response = define_pr_update(self.pr_response[0], self.dmp_id) self.assertIsInstance(pr_response, dict, "The result should be a dictionary") self.assertTrue(len(pr_response) > 0, "The result should not be an empty dictionary") - + def test_define_issue_update(self): - # Ensure the response was set self.assertIsNotNone(self.comments_response, "No pr response was fetched") + issue_response = define_issue_update(self.comments_response[0], self.dmp_id) + self.assertIsInstance(issue_response, dict, "The result should be a dictionary") + self.assertTrue(len(issue_response) > 0, "The result should not be an empty dictionary") + + def test_get_dmp_issues(self): + # Fetch dmp issues from the database + dmp_tickets = self.db.get_dmp_issues() + self.assertTrue(len(dmp_tickets) > 0, "No dmp_tickets found") + # Call the function to test + issue_response = define_issue_update(self.comments_response[0],self.dmp_id) + # Call the function to test issue_response = define_issue_update(self.comments_response[0],self.dmp_id) # Check if the function returns a non-empty result self.assertIsInstance(issue_response, dict, "The result should be a dictionary") self.assertTrue(len(issue_response) > 0, "The result should not be an empty dictionary") - + + # Check if the function returns a non-empty result + self.assertIsInstance(issue_response, dict, "The result should be a dictionary") + self.assertTrue(len(issue_response) > 0, "The result should not be an empty dictionary") + def test_get_dmp_issues(self): # Fetch dmp issues from the database dmp_tickets = self.db.get_dmp_issues() diff --git a/utils.py b/utils.py index 50b864c..f3333bb 100644 --- a/utils.py +++ b/utils.py @@ -2,7 +2,6 @@ import requests import logging import markdown2 -from db import SupabaseInterface from query import PostgresQuery,PostgresORM def parse_issue_description(issue_body): @@ -46,7 +45,6 @@ async def handle_week_data(comment, issue_url, dmp_id, mentor_name,async_session if "Weekly Goals" not in plain_text_body and "Weekly Learnings" not in plain_text_body: return False - db = SupabaseInterface().get_instance() # find matched from issue body week_matches = re.findall(r'(<.*?>Week \d+<.*?>)', plain_text_body) @@ -90,9 +88,6 @@ async def handle_week_data(comment, issue_url, dmp_id, mentor_name,async_session "dmp_id": dmp_id } - exist = db.client.table('dmp_week_updates').select( - "*").eq('dmp_id', week_json['dmp_id']).eq('week', week_json['week']).execute() - exist = await 
PostgresORM.get_week_updates(async_session,week_json['dmp_id'],week_json['week']) From 76c1847579fa52593d737ce9bb38c235ed859ab0 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 9 Aug 2024 16:54:23 +0530 Subject: [PATCH 05/32] testcase changes --- app.py | 2 +- query.py | 2 +- test.py | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/app.py b/app.py index 52ea897..4062387 100644 --- a/app.py +++ b/app.py @@ -180,7 +180,7 @@ async def dmp_updates(): comment_update = define_issue_update(val, dmp_id=dmp_id) app.logger.info('Comment from remote: ', comment_update) - #get created_at + #get created_at created_timestamp = PostgresQuery.get_timestamp('dmp_issue_updates','created_at','comment_id',comment_update['comment_id']) comment_update['created_at'] = datetime.utcnow() if not created_timestamp else created_timestamp comment_update['comment_updated_at'] = datetime.utcnow().replace(tzinfo=None) diff --git a/query.py b/query.py index d281b7d..bcb28fb 100644 --- a/query.py +++ b/query.py @@ -256,7 +256,7 @@ def get_timestamp(table_name, col_name, col, value): SELECT {col_name} FROM {table_name} WHERE {col} = %s; """ - data = PostgresQuery.postgres_query(query)(query, (value,)) + data = PostgresQuery.postgres_query(query, (value,)) if data: return data[0][col_name] diff --git a/test.py b/test.py index 0d2dfd3..72a1cc9 100644 --- a/test.py +++ b/test.py @@ -1,6 +1,5 @@ import httpx, logging, os, unittest,random from app import app -from db import SupabaseInterface from app import define_issue_description_update, define_pr_update, define_issue_update, async_session from query import PostgresORM from sqlalchemy.orm import aliased @@ -32,7 +31,6 @@ class TestDMPUpdates(unittest.IsolatedAsyncioTestCase): async def asyncSetUp(self): self.app = app self.client = self.app.test_client() - self.db = SupabaseInterface().get_instance() self.issue_response = None self.comments_response = None self.pr_response = None @@ -79,6 +77,8 @@ async def asyncSetUp(self): if not dmp_tickets: self.skipTest("No dmp_tickets found") + self.dmp_tickets = dmp_tickets + dmp = dmp_tickets[random.randint(0,len(dmp_tickets)-1)] self.dmp_id = dmp['id'] self.issue_number = dmp['issue_number'] @@ -154,7 +154,7 @@ def test_define_issue_update(self): def test_get_dmp_issues(self): # Fetch dmp issues from the database - dmp_tickets = self.db.get_dmp_issues() + dmp_tickets = self.dmp_tickets self.assertTrue(len(dmp_tickets) > 0, "No dmp_tickets found") # Call the function to test @@ -173,7 +173,7 @@ def test_get_dmp_issues(self): def test_get_dmp_issues(self): # Fetch dmp issues from the database - dmp_tickets = self.db.get_dmp_issues() + dmp_tickets = self.dmp_tickets self.assertTrue(len(dmp_tickets) > 0, "No dmp_tickets found") From 313f99c5238cb663ffa0feeff5431400f8e65650 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 9 Aug 2024 16:55:23 +0530 Subject: [PATCH 06/32] package removed --- requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 6b1ecb2..2aa40d0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,6 @@ APScheduler==3.10.4 httpx==0.27.0 python-dotenv==1.0.1 Quart==0.19.5 -supabase==2.4.5 requests==2.32.3 markdown2==2.4.13 postgrest==0.16.4 From ce1707ffbf5e1ef7aac853a59d3c01c3f82cd2a4 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 9 Aug 2024 17:23:45 +0530 Subject: [PATCH 07/32] changes --- app.py | 4 +- query.py | 326 ++++--------------------------------------------------- utils.py | 2 +- 3 files changed, 23 insertions(+), 309 deletions(-) 
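The patch below finishes the migration: app.py's comment-timestamp lookup switches from the raw-SQL PostgresQuery.get_timestamp to an awaited PostgresORM.get_timestamp (first hunk below), and the psycopg2 helper block is deleted from query.py. The ORM method itself is not shown in this excerpt, so the following is only a minimal sketch of what it could look like, assuming the async_session factory and the models introduced in PATCH 03:

    from sqlalchemy.future import select

    async def get_timestamp(async_session, model, col_name, filter_col, filter_value):
        # Return the stored column value for the first row where
        # filter_col == filter_value, or None when no such row exists
        # (app.py then falls back to datetime.utcnow()).
        try:
            async with async_session() as session:
                stmt = select(getattr(model, col_name)).where(
                    getattr(model, filter_col) == filter_value
                )
                result = await session.execute(stmt)
                return result.scalars().first()
        except Exception as e:
            print(f"An error occurred:get_timestamp {e}")
            return None

A call shaped like PostgresORM.get_timestamp(async_session, DmpIssueUpdate, 'created_at', 'comment_id', comment_update['comment_id']) then preserves the original created_at across repeated upserts of the same comment.
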
diff --git a/app.py b/app.py index 4062387..8fd18bd 100644 --- a/app.py +++ b/app.py @@ -4,7 +4,7 @@ from apscheduler.schedulers.asyncio import AsyncIOScheduler from dotenv import load_dotenv from datetime import datetime,timezone -from query import PostgresQuery,PostgresORM +from query import PostgresORM from utils import handle_week_data, parse_issue_description from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession from sqlalchemy.orm import sessionmaker @@ -181,7 +181,7 @@ async def dmp_updates(): app.logger.info('Comment from remote: ', comment_update) #get created_at - created_timestamp = PostgresQuery.get_timestamp('dmp_issue_updates','created_at','comment_id',comment_update['comment_id']) + created_timestamp = await PostgresORM.get_timestamp(async_session, DmpIssueUpdate, 'created_at', 'comment_id', comment_update['comment_id']) comment_update['created_at'] = datetime.utcnow() if not created_timestamp else created_timestamp comment_update['comment_updated_at'] = datetime.utcnow().replace(tzinfo=None) comment_update['created_at'] = comment_update['created_at'].replace(tzinfo=None) diff --git a/query.py b/query.py index bcb28fb..aeab7ea 100644 --- a/query.py +++ b/query.py @@ -1,317 +1,15 @@ from sqlalchemy.future import select -import psycopg2 from models import * from sqlalchemy import update # from app import async_session from sqlalchemy.dialects.postgresql import insert -from datetime import datetime, timezone, timedelta +from datetime import datetime from sqlalchemy.orm import aliased import os -from psycopg2.extras import RealDictCursor +from sqlalchemy.orm import Session +from sqlalchemy.exc import NoResultFound -class PostgresQuery: - def get_postgres_connection(): - - # Database configuration - DB_HOST =os.getenv('POSTGRES_DB_HOST') - DB_NAME =os.getenv('POSTGRES_DB_NAME') - DB_USER =os.getenv('POSTGRES_DB_USER') - DB_PASS =os.getenv('POSTGRES_DB_PASS') - conn = psycopg2.connect( - host=DB_HOST, - database=DB_NAME, - user=DB_USER, - password=DB_PASS - ) - return conn - - def postgres_query(query,params=None): - try: - conn = PostgresQuery.get_postgres_connection() - cursor = conn.cursor(cursor_factory=RealDictCursor) - - # cursor = conn.cursor() - if not params: - cursor.execute(query) - else: - cursor.execute(query,params) - - try: - rows = cursor.fetchall() - except Exception as e: - rows = [] #only for UPDATE method - - results_as_dicts = [dict(row) for row in rows] - - cursor.close() - conn.close() - return results_as_dicts - - except Exception as e: - print(e) - pass - - def get_issue_query(): - query = """ - SELECT - dmp_orgs.id AS org_id, - dmp_orgs.name AS org_name, - json_agg( - json_build_object( - 'id', dmp_issues.id, - 'name', dmp_issues.title - ) - ) AS issues - FROM - dmp_orgs - LEFT JOIN - dmp_issues - ON - dmp_orgs.id = dmp_issues.org_id - GROUP BY - dmp_orgs.id - ORDER BY - dmp_orgs.id; - """ - - data = PostgresQuery.postgres_query(query) - return data - - def get_issue_owner(name): - query = """ - SELECT name, description - FROM dmp_orgs - WHERE name = %s; - """ - data = PostgresQuery.postgres_query(query)(query,(name,)) - return data - - def get_actual_owner_query(owner): - query = """ - SELECT id, name, repo_owner - FROM dmp_orgs - WHERE name LIKE %s; - """ - - data = PostgresQuery.postgres_query(query)(query,(f'%{owner}%',)) - return data - - - def get_dmp_issues(issue_id): - - query = """ - SELECT * FROM dmp_issues - WHERE id = %s; - """ - data = PostgresQuery.postgres_query(query)(query,(issue_id,)) - return data - - def 
get_all_dmp_issues(): - - query = """SELECT - dmp_issues.*, - json_build_object( - 'created_at', dmp_orgs.created_at, - 'description', dmp_orgs.description, - 'id', dmp_orgs.id, - 'link', dmp_orgs.link, - 'name', dmp_orgs.name, - 'repo_owner', dmp_orgs.repo_owner - ) AS dmp_orgs - FROM - dmp_issues - LEFT JOIN - dmp_orgs - ON - dmp_issues.org_id = dmp_orgs.id - WHERE - dmp_issues.org_id IS NOT NULL - ORDER BY - dmp_issues.id; - - - """ - - data = PostgresQuery.postgres_query(query)(query) - return data - - def get_dmp_issue_updates(dmp_issue_id): - - query = """ - SELECT * FROM dmp_issue_updates - WHERE dmp_id = %s; - """ - data = PostgresQuery.postgres_query(query)(query,(dmp_issue_id,)) - return data - - - def get_pr_data(dmp_issue_id): - - query = """ - SELECT * FROM dmp_pr_updates - WHERE dmp_id = %s; - """ - data = PostgresQuery.postgres_query(query)(query,(dmp_issue_id,)) - return data - - def postgres_query_insert(query, params=None): - try: - conn = PostgresQuery.get_postgres_connection() - from psycopg2.extras import RealDictCursor - cursor = conn.cursor(cursor_factory=RealDictCursor) - - if not params: - cursor.execute(query) - else: - cursor.execute(query, params) - - # Check if the query is an update/insert/delete or a select - if query.strip().lower().startswith("select"): - rows = cursor.fetchall() - results_as_dicts = [dict(row) for row in rows] - cursor.close() - conn.close() - return results_as_dicts - else: - # For update/insert/delete, commit the transaction and close cursor - conn.commit() - cursor.close() - conn.close() - return True - - except Exception as e: - print(f"An error occurred:postgres_query_insert {e}") - raise Exception - - - def update_data(data, table_name, match_column, match_value): - try: - # Construct the SQL query - set_clause = ", ".join([f"{key} = %s" for key in data.keys()]) - query = f"UPDATE {table_name} SET {set_clause} WHERE {match_column} = %s" - - # Values to update - values = list(data.values()) - values.append(match_value) - - # Execute the query using postgres_query - PostgresQuery.postgres_query_insert(query, values) - return True - - except Exception as e: - print(f"An error occurred:update_data {e}") - return None - - - - - def insert_data(data, table_name, match_column, match_value): - try: - # Construct the SQL query - set_clause = ", ".join([f"{key} = %s" for key in data.keys()]) - query = f"INSERT INTO {table_name} SET {set_clause} WHERE {match_column} = %s" - - # Values to update - values = list(data.values()) - values.append(match_value) - - # Execute the query using postgres_query - PostgresQuery.postgres_query_insert(query, values) - - return values - except Exception as e: - print(f"An error occurred:upsert_data {e}") - return None - - def upsert_data(data, table_name, conflict_column): - try: - # Construct the SQL query for UPSERT - columns = ', '.join(data.keys()) - placeholders = ', '.join(['%s'] * len(data)) - updates = ', '.join([f"{key} = EXCLUDED.{key}" for key in data.keys()]) - - query = f""" - INSERT INTO {table_name} ({columns}) - VALUES ({placeholders}) - ON CONFLICT ({conflict_column}) - DO UPDATE SET {updates} - """ - - # Values to insert or update - values = list(data.values()) - - # Execute the query using postgres_query - PostgresQuery.postgres_query_insert(query, values) - return values - - except Exception as e: - print(f"An error occurred:upsert_data {e}") - return None - - - def get_timestamp(table_name, col_name, col, value): - try: - query = f""" - SELECT {col_name} FROM {table_name} - WHERE {col} = %s; 
- """ - data = PostgresQuery.postgres_query(query, (value,)) - - if data: - return data[0][col_name] - else: - return None - - except Exception as e: - print(f"An error occurred:get_timestamp {e}") - return None - - def check_week_exist(dmp_id, week): - try: - query = """ - SELECT * FROM dmp_week_updates - WHERE dmp_id = %s AND week = %s; - """ - data = PostgresQuery.postgres_query(query)(query, (dmp_id, week)) - - if data: - return data - else: - return None - - except Exception as e: - print(f"An error occurred:check_week_exist {e}") - return None - - - def multiple_update_data(data, table_name, match_columns, match_values): - try: - # Construct the SET clause - set_clause = ", ".join([f"{key} = %s" for key in data.keys()]) - - # Construct the WHERE clause for multiple conditions - where_clause = " AND ".join([f"{col} = %s" for col in match_columns]) - - # Combine the clauses into the final query - query = f""" - UPDATE {table_name} - SET {set_clause} - WHERE {where_clause} - """ - - # Values to update followed by the match values - values = list(data.values()) + match_values - - # Execute the query using postgres_query - val = PostgresQuery.postgres_query_insert(query, values) - return val - - except Exception as e: - print(f"An error occurred:multiple_update_data {e}") - raise Exception - - class PostgresORM: def get_postgres_uri(): @@ -321,7 +19,23 @@ def get_postgres_uri(): DB_PASS = os.getenv('POSTGRES_DB_PASS') return f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}' - + + async def get_timestamp(async_session, model, col_name: str, col: str, value): + try: + # Construct the ORM query + query = select(getattr(model, col_name)).filter(getattr(model, col) == value) + + # Execute the query and fetch the result + async with async_session() as session: + result = await session.execute(query) + return result.scalar() + + except NoResultFound: + return None + except Exception as e: + print(f"An error occurred: get_column_value {e}") + return None + async def get_all_dmp_issues(async_session): try: async with async_session() as session: diff --git a/utils.py b/utils.py index f3333bb..a269f8d 100644 --- a/utils.py +++ b/utils.py @@ -2,7 +2,7 @@ import requests import logging import markdown2 -from query import PostgresQuery,PostgresORM +from query import PostgresORM def parse_issue_description(issue_body): # Description is everything before goals. 
From 1f189498097806b6ea1625a7a18dd7ff85cce829 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 9 Aug 2024 17:31:34 +0530 Subject: [PATCH 08/32] query changes --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index 8fd18bd..e292c47 100644 --- a/app.py +++ b/app.py @@ -212,7 +212,7 @@ async def dmp_updates(): if (pr_created_at >= TARGET_DATE): pr_data = define_pr_update(pr_val, dmp_id) - created_timestamp = PostgresQuery.get_timestamp('dmp_pr_updates','created_at','pr_id',pr_data['pr_id']) + created_timestamp = await PostgresORM.get_timestamp(async_session,Prupdates,'created_at','pr_id',pr_data['pr_id']) pr_data['created_at'] = datetime.utcnow() if not created_timestamp else created_timestamp pr_data['created_at'] = pr_data['created_at'].replace(tzinfo=None) From b6f0b647347d068bdfc6e9cceafba39fcf29ce07 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 9 Aug 2024 19:15:46 +0530 Subject: [PATCH 09/32] workflow changes - addded postgres vars --- .github/workflows/ci.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1c49fa1..717c06a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -66,10 +66,16 @@ jobs: SUPABASE_KEY: ${{ secrets[format('APP_{0}_SUPABASE_KEY', needs.set_vars.outputs.APP_ENV)] }} SUPABASE_URL: ${{ vars[format('APP_{0}_SUPABASE_URL', needs.set_vars.outputs.APP_ENV)] }} SCHEDULER_DELAY_IN_MINS: ${{ vars[format('APP_{0}_SCHEDULER_DELAY_IN_MINS', needs.set_vars.outputs.APP_ENV)] }} + POSTGRES_DB_HOST: ${{ secrets[format('APP_{0}_POSTGRES_DB_HOST', needs.set_vars.outputs.APP_ENV)] }} + POSTGRES_DB_NAME: ${{ secrets[format('APP_{0}_POSTGRES_DB_NAME', needs.set_vars.outputs.APP_ENV)] }} + POSTGRES_DB_USER: ${{ secrets[format('APP_{0}_POSTGRES_DB_USER', needs.set_vars.outputs.APP_ENV)] }} + POSTGRES_DB_PASS: ${{ secrets[format('APP_{0}_POSTGRES_DB_PASS', needs.set_vars.outputs.APP_ENV)] }} + steps: - name: Checkout code uses: actions/checkout@v2 + - name: Log in to the Container registry uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 with: @@ -87,6 +93,10 @@ jobs: echo "SUPABASE_URL=${SUPABASE_URL}" >> .env echo "SUPABASE_KEY=${SUPABASE_KEY}" >> .env echo "SCHEDULER_DELAY_IN_MINS=${SCHEDULER_DELAY_IN_MINS}" >> .env + echo "POSTGRES_DB_HOST=${POSTGRES_DB_HOST}" >> .env + echo "POSTGRES_DB_NAME=${POSTGRES_DB_NAME}" >> .env + echo "POSTGRES_DB_USER=${POSTGRES_DB_USER}" >> .env + echo "POSTGRES_DB_PASS=${POSTGRES_DB_PASS}" >> .env mv .env ${{ env.DOT_ENV_FILE_NAME }} From 44b2cde4036473ccb71c9cabe5f22c432444ffc0 Mon Sep 17 00:00:00 2001 From: sasi Date: Fri, 9 Aug 2024 19:22:49 +0530 Subject: [PATCH 10/32] req added --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 2aa40d0..d96e908 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,3 +7,4 @@ markdown2==2.4.13 postgrest==0.16.4 psycopg2-binary==2.9.9 SQLAlchemy==2.0.32 +asyncpg==0.29.0 \ No newline at end of file From f74e6a2da8c4da1301ce1a591e4821e16c2a7d21 Mon Sep 17 00:00:00 2001 From: sasi Date: Wed, 21 Aug 2024 18:47:52 +0530 Subject: [PATCH 11/32] added logs --- app.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/app.py b/app.py index e292c47..9e7b531 100644 --- a/app.py +++ b/app.py @@ -224,9 +224,11 @@ async def dmp_updates(): print('issue response ', issue_response) app.logger.error("PR API failed: " + str(issue_response.status_code) + " for dmp_id: "+str(dmp_id)) + print(f"last run at - 
{datetime.utcnow()}") return "success" except Exception as e: print(e) + print(f"last run with error - {datetime.utcnow()}") return "Server Error" From 1c2d102dc3a0491450eb99337dfe4c0acc2f61dc Mon Sep 17 00:00:00 2001 From: jaanbaaz <106968030+jaanbaaz@users.noreply.github.com> Date: Thu, 12 Sep 2024 19:24:35 +0530 Subject: [PATCH 12/32] Create build-and-push.yaml added a new action for building images on push to dev and main --- .github/workflows/build-and-push.yaml | 57 +++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 .github/workflows/build-and-push.yaml diff --git a/.github/workflows/build-and-push.yaml b/.github/workflows/build-and-push.yaml new file mode 100644 index 0000000..1e58239 --- /dev/null +++ b/.github/workflows/build-and-push.yaml @@ -0,0 +1,57 @@ +name: Build and Push Docker Image + +on: + push: + branches: + - main + - dev + release: + types: [published] +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + build-and-push: + runs-on: ubuntu-latest + # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job. + permissions: + contents: read + packages: write + steps: + + - name: Checkout code + uses: actions/checkout@v2 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + # minimal + type=pep440,pattern={{version}},value=${{ github.ref_name }},enable=${{ github.event_name == 'release' }} + # branch event + type=ref,event=branch + type=raw,value=latest,enable=${{ github.event_name == 'release' }} + + - name: Build and Push Docker image + uses: docker/build-push-action@v4 + with: + # build-args: + context: . 
+ push: true + cache-from: type=gha + cache-to: type=gha,mode=max + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} From 4501aff486c15a901461c9e5b1766b5da83b185f Mon Sep 17 00:00:00 2001 From: jaanbaaz Date: Wed, 4 Dec 2024 17:54:39 +0530 Subject: [PATCH 13/32] first commit --- README.md | Bin 0 -> 58 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 0000000000000000000000000000000000000000..88b2a7e5e073d26c94e1ef212826f7ed54b4701f GIT binary patch literal 58 zcmezWPnki1p_n0qA(5eoA(bJ8L6;$yAs literal 0 HcmV?d00001 From cb8c194d69b28172a8ba89c4be28857a7241c21c Mon Sep 17 00:00:00 2001 From: jaanbaaz Date: Wed, 4 Dec 2024 17:55:14 +0530 Subject: [PATCH 14/32] Initial setup commit --- alembic.ini | 117 ++++++++++++++++++++++++++++++++++++++ db/__init__.py | 0 db/cron.py | 0 db/discord-bot.py | 0 db/dmp_api.py | 0 db/models.py | 0 db/server.py | 0 migrations/README | 1 + migrations/env.py | 78 +++++++++++++++++++++++++ migrations/script.py.mako | 26 +++++++++ requirements.txt | Bin 0 -> 120 bytes 11 files changed, 222 insertions(+) create mode 100644 alembic.ini create mode 100644 db/__init__.py create mode 100644 db/cron.py create mode 100644 db/discord-bot.py create mode 100644 db/dmp_api.py create mode 100644 db/models.py create mode 100644 db/server.py create mode 100644 migrations/README create mode 100644 migrations/env.py create mode 100644 migrations/script.py.mako create mode 100644 requirements.txt diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..421e3fe --- /dev/null +++ b/alembic.ini @@ -0,0 +1,117 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +# Use forward slashes (/) also on windows to provide an os agnostic path +script_location = migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. +# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to migrations/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. 
+# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +# version_path_separator = newline +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/db/__init__.py b/db/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/db/cron.py b/db/cron.py new file mode 100644 index 0000000..e69de29 diff --git a/db/discord-bot.py b/db/discord-bot.py new file mode 100644 index 0000000..e69de29 diff --git a/db/dmp_api.py b/db/dmp_api.py new file mode 100644 index 0000000..e69de29 diff --git a/db/models.py b/db/models.py new file mode 100644 index 0000000..e69de29 diff --git a/db/server.py b/db/server.py new file mode 100644 index 0000000..e69de29 diff --git a/migrations/README b/migrations/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 0000000..36112a3 --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,78 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. 
+if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = None + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..09654fd487b9ff1c6b983e3b0358619ab63fe849 GIT binary patch literal 120 zcmYL>y$(P?5QM*4;!(&s8ihiy5l?VLkoXIgm&dq9HnZ96d^6AOnCV$)ndm4q39*!E p8$tqhPA++yt^rHr*%im3|Cg|0bFYtYsHrH`+cEm?pE9CS@h>Ic6G#96 literal 0 HcmV?d00001 From f6cb9cc442fea02a9d54d3833bcb9d329e6eb8b8 Mon Sep 17 00:00:00 2001 From: jaanbaaz Date: Wed, 4 Dec 2024 19:00:06 +0530 Subject: [PATCH 15/32] Complete setup of alembic to manage migrations --- .gitignore | 1 + .vscode/settings.json | 5 + alembic.ini | 3 +- db/__init__.py | 33 + db/cron.py | 0 db/discord-bot.py | 302 ++++++++ db/dmp_api.py | 67 ++ db/dmp_cron.py | 246 ++++++ db/models.py | 1721 +++++++++++++++++++++++++++++++++++++++++ db/server.py | 938 ++++++++++++++++++++++ migrations/env.py | 10 +- requirements.txt | Bin 120 -> 164 bytes sample.env | 5 + 13 files changed, 3329 insertions(+), 2 deletions(-) create mode 100644 .gitignore create mode 100644 .vscode/settings.json delete mode 100644 db/cron.py create mode 100644 db/dmp_cron.py create mode 100644 sample.env diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2eea525 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +.env \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..acd8ec1 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,5 @@ +{ + "python.analysis.extraPaths": [ + "./db" + ] +} \ No newline at end of file diff --git a/alembic.ini b/alembic.ini index 421e3fe..8dae057 100644 --- a/alembic.ini +++ b/alembic.ini @@ -61,7 +61,8 @@ version_path_separator = os # Use os.pathsep. 
Default configuration used for ne # are written from script.py.mako # output_encoding = utf-8 -sqlalchemy.url = driver://user:pass@localhost/dbname +sqlalchemy.url = %(DATABASE_URL)s + [post_write_hooks] diff --git a/db/__init__.py b/db/__init__.py index e69de29..52d6df0 100644 --- a/db/__init__.py +++ b/db/__init__.py @@ -0,0 +1,33 @@ +import os + +# from discord import Member +from dotenv import load_dotenv +from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession +from sqlalchemy.orm import sessionmaker +from sqlalchemy.pool import NullPool + +load_dotenv(".env") + + +def get_postgres_uri(): + DB_HOST = os.getenv('POSTGRES_DB_HOST') + DB_NAME = os.getenv('POSTGRES_DB_NAME') + DB_USER = os.getenv('POSTGRES_DB_USER') + DB_PASS = os.getenv('POSTGRES_DB_PASS') + + # DB_URL = os.getenv('DATABASE_URL') + # print('db') + return f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}' + + +class PostgresORM: + + def __init__(self): + DATABASE_URL = get_postgres_uri() + # Initialize Async SQLAlchemy + engine = create_async_engine(DATABASE_URL, echo=False,poolclass=NullPool) + async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession) + self.session = async_session + + def get_instance(): + return PostgresORM() \ No newline at end of file diff --git a/db/cron.py b/db/cron.py deleted file mode 100644 index e69de29..0000000 diff --git a/db/discord-bot.py b/db/discord-bot.py index e69de29..a9e0de6 100644 --- a/db/discord-bot.py +++ b/db/discord-bot.py @@ -0,0 +1,302 @@ +import os + +# from discord import Member +from dotenv import load_dotenv +from sqlalchemy import create_engine,select,desc,update,delete +from sqlalchemy.orm import sessionmaker +from models import * +from sqlalchemy.ext.declarative import DeclarativeMeta +from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession + +# load_dotenv() + +class DiscordBotQueries: + # def __init__(self): + # DB_HOST = os.getenv('POSTGRES_DB_HOST') + # DB_NAME = os.getenv('POSTGRES_DB_NAME') + # DB_USER = os.getenv('POSTGRES_DB_USER') + # DB_PASS = os.getenv('POSTGRES_DB_PASS') + + # engine = create_async_engine(f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}') + # async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession) + # self.session = async_session + + # def get_instance(): + # return PostgresClient() + + def convert_dict(self,data): + try: + if type(data) == list: + data = [val.to_dict() for val in data] + else: + return [data.to_dict()] + + return data + except Exception as e: + print(e) + raise Exception + + def getStatsStorage(self, fileName): + return self.client.storage.from_("c4gt-github-profile").download(fileName) + + + def logVCAction(self,user, action): + try: + new_log = VcLogs(discord_id=user.id, discord_name=user.name, option=action) + self.session.add(new_log) + self.session.commit() + return self.convert_dict(new_log) + except Exception as e: + self.session.rollback() + print("Error logging VC action:", e) + return None + + def getLeaderboard(self, id: int): + data = self.session.query(Leaderboard).where(Leaderboard.discord_id == id).all() + return self.convert_dict(data) + + + def read(self, table_class, query_key, query_value, columns=None): + try: + stmt = select(table_class) + stmt = stmt.where(getattr(table_class, query_key) == query_value) + + if columns: + stmt = stmt.with_only_columns(*(getattr(table_class, col) for col in columns)) + result = self.session.execute(stmt) + rows = result.fetchall() + 
column_names = [col.name for col in stmt.columns] + data = [dict(zip(column_names, row)) for row in rows] + return data + + result = self.session.execute(stmt) + return self.convert_dict(result.scalars().all()) + + except Exception as e: + print(f"Error reading data from table '{table_class}':", e) + return None + + def get_class_by_tablename(self,tablename): + try: + for cls in Base.registry._class_registry.values(): + if isinstance(cls, DeclarativeMeta): + if hasattr(cls, '__tablename__') and cls.__tablename__ == tablename: + return cls + return None + except Exception as e: + print(f"ERROR get_class_by_tablename - {e}") + return None + + def read_by_order_limit(self, table_class, query_key, query_value, order_column, order_by=False, limit=1, columns="*"): + try: + stmt = select(table_class) + stmt = stmt.where(getattr(table_class, query_key) == query_value) + if order_by: + stmt = stmt.order_by(desc(getattr(table_class, order_column))) + else: + stmt = stmt.order_by(getattr(table_class, order_column)) + + stmt = stmt.limit(limit) + if columns != "*": + stmt = stmt.with_only_columns(*(getattr(table_class, col) for col in columns)) + + result = self.session.execute(stmt) + results = result.fetchall() + + # Convert results to list of dictionaries + column_names = [col['name'] for col in result.keys()] + data = [dict(zip(column_names, row)) for row in results] + + return data + + except Exception as e: + print("Error reading data:", e) + return None + + async def read_all(self,table_class): + try: + table = self.get_class_by_tablename(table_class) + # Query all records from the specified table class + async with self.session() as session: + stmt = select(table) + result = await session.execute(stmt) + + data = result.scalars().all() + result = self.convert_dict(data) + return result + except Exception as e: + print(f"An error occurred -read_all_from_table : {e}") + return None + + def update(self, table_class, update_data, query_key, query_value): + try: + stmt = ( + update(table_class) + .where(getattr(table_class, query_key) == query_value) + .values(update_data) + .returning(*[getattr(table_class, col) for col in update_data.keys()]) # Return updated columns + ) + + result = self.session.execute(stmt) + self.session.commit() + updated_record = result.fetchone() + + if updated_record: + updated_record_dict = dict(zip(result.keys(), updated_record)) + return updated_record_dict + else: + return None + except Exception as e: + import pdb;pdb.set_trace() + print("Error updating record:", e) + return None + + + def insert(self, table, data): + try: + new_record = table(**data) + self.session.add(new_record) + self.session.commit() + return new_record.to_dict() + except Exception as e: + print("Error inserting data:", e) + self.session.rollback() # Rollback in case of error + return None + + + def memberIsAuthenticated(self, member: Member): + data = self.session.query(ContributorsRegistration).where(ContributorsRegistration.discord_id == member.id).all() + if data: + return True + else: + return False + + def addChapter(self, roleId: int, orgName: str, type: str): + try: + existing_record = self.session.query(Chapters).filter_by(discord_role_id=roleId).first() + + if existing_record: + existing_record.type = type + existing_record.org_name = orgName + else: + new_record = Chapters(discord_role_id=roleId, type=type, org_name=orgName) + self.session.add(new_record) + + self.session.commit() + return existing_record.to_dict() if existing_record else new_record.to_dict() + except Exception as e: 
+ print("Error adding or updating chapter:", e) + return None + + + def deleteChapter(self,roleId: int): + try: + # Build the delete statement + stmt = delete(Chapters).where(Chapters.discord_role_id == roleId) + result = self.session.execute(stmt) + self.session.commit() + return True if result.rowcount else False + except Exception as e: + print("Error deleting chapter:", e) + return None + + async def updateContributor(self, contributor: Member, table_class=None): + try: + async with self.session() as session: + if table_class == None: + table_class = ContributorsDiscord + chapters = lookForRoles(contributor.roles)["chapter_roles"] + gender = lookForRoles(contributor.roles)["gender"] + + # Prepare the data to be upserted + update_data = { + "discord_id": contributor.id, + "discord_username": contributor.name, + "chapter": chapters[0] if chapters else None, + "gender": gender, + "joined_at": contributor.joined_at, + } + + stmt = select(ContributorsDiscord).where(ContributorsDiscord.discord_id == contributor.id) + result = await session.execute(stmt) + existing_record = result.scalars().first() + + # existing_record = self.session.query(table_class).filter_by(discord_id=contributor.id).first() + + if existing_record: + stmt = ( + update(table_class) + .where(table_class.discord_id == contributor.id) + .values(update_data) + ) + self.session.execute(stmt) + else: + new_record = table_class(**update_data) + self.session.add(new_record) + + # Commit the transaction + self.session.commit() + return True + except Exception as e: + print("Error updating contributor:", e) + return False + + + def updateContributors(self, contributors: [Member], table_class): + try: + for contributor in contributors: + chapters = lookForRoles(contributor.roles)["chapter_roles"] + gender = lookForRoles(contributor.roles)["gender"] + update_data = { + "discord_id": contributor.id, + "discord_username": contributor.name, + "chapter": chapters[0] if chapters else None, + "gender": gender, + "joined_at": contributor.joined_at, + } + existing_record = self.session.query(table_class).filter_by(discord_id=contributor.id).first() + + if existing_record: + stmt = ( + update(table_class) + .where(table_class.discord_id == contributor.id) + .values(update_data) + ) + self.session.execute(stmt) + else: + new_record = table_class(**update_data) + self.session.add(new_record) + + self.session.commit() + return True + except Exception as e: + print("Error updating contributors:", e) + return False + + + def deleteContributorDiscord(self, contributorDiscordIds, table_class=None): + try: + if table_class == None: + table_class = ContributorsDiscord + stmt = delete(table_class).where(table_class.discord_id.in_(contributorDiscordIds)) + self.session.execute(stmt) + self.session.commit() + + return True + except Exception as e: + print("Error deleting contributors:", e) + self.session.rollback() + return False + + + + def read_all_active(self, table): + if table == "contributors_discord": + table = ContributorsDiscord + data = self.session.query(table).where(table.is_active == True).all() + return self.convert_dict(data) + + def invalidateContributorDiscord(self, contributorDiscordIds): + table = "contributors_discord" + for id in contributorDiscordIds: + self.client.table(table).update({'is_active': 'false'}).eq('discord_id', id).execute() diff --git a/db/dmp_api.py b/db/dmp_api.py index e69de29..8b0b02d 100644 --- a/db/dmp_api.py +++ b/db/dmp_api.py @@ -0,0 +1,67 @@ +from models import * +from sqlalchemy import func +import os 
+from dotenv import load_dotenv + + +# load_dotenv() + + +class DmpAPIQueries: + + # def get_postgres_uri(): + # DB_HOST = os.getenv('POSTGRES_DB_HOST') + # DB_NAME = os.getenv('POSTGRES_DB_NAME') + # DB_USER = os.getenv('POSTGRES_DB_USER') + # DB_PASS = os.getenv('POSTGRES_DB_PASS') + + # return f'postgresql://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}' + + def get_issue_query(): + results = ( + db.session.query( + DmpOrg.id.label('org_id'), + DmpOrg.name.label('org_name'), + func.json_agg( + func.json_build_object( + 'id', DmpIssue.id, + 'name', DmpIssue.title + ) + ).label('issues') + ) + .outerjoin(DmpIssue, DmpOrg.id == DmpIssue.org_id) + .group_by(DmpOrg.id) + .order_by(DmpOrg.id) + .all() + ) + + return results + + def get_issue_owner(name): + response = DmpOrg.query.filter_by(name=name).all() + return response + + def get_actual_owner_query(owner): + results = DmpIssue.query.filter(DmpIssue.repo_owner.like(f'%{owner}%')).all() + results = [val.to_dict() for val in results] + return results + + + def get_dmp_issues(issue_id): + results = DmpIssue.query.filter_by(id=issue_id).all() + results = [val.to_dict() for val in results] + return results + + + def get_dmp_issue_updates(dmp_issue_id): + results = DmpIssueUpdate.query.filter_by(dmp_id=dmp_issue_id).all() + results = [val.to_dict() for val in results] + return results + + + def get_pr_data(dmp_issue_id): + pr_updates = Prupdates.query.filter_by(dmp_id=dmp_issue_id).all() + pr_updates_dict = [pr_update.to_dict() for pr_update in pr_updates] + return pr_updates_dict + + \ No newline at end of file diff --git a/db/dmp_cron.py b/db/dmp_cron.py new file mode 100644 index 0000000..6cdfcb4 --- /dev/null +++ b/db/dmp_cron.py @@ -0,0 +1,246 @@ +from sqlalchemy.future import select +from models import * +from sqlalchemy import update +# from app import async_session +from sqlalchemy.dialects.postgresql import insert +from datetime import datetime +from sqlalchemy.orm import aliased +import os +from sqlalchemy.orm import Session +from sqlalchemy.exc import NoResultFound + + +class DmpCronQueries: + + # def get_postgres_uri(): + # DB_HOST = os.getenv('POSTGRES_DB_HOST') + # DB_NAME = os.getenv('POSTGRES_DB_NAME') + # DB_USER = os.getenv('POSTGRES_DB_USER') + # DB_PASS = os.getenv('POSTGRES_DB_PASS') + + # return f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}' + + async def get_timestamp(async_session, model, col_name: str, col: str, value): + try: + # Construct the ORM query + query = select(getattr(model, col_name)).filter(getattr(model, col) == value) + + # Execute the query and fetch the result + async with async_session() as session: + result = await session.execute(query) + return result.scalar() + + except NoResultFound: + return None + except Exception as e: + print(f"An error occurred: get_column_value {e}") + return None + + async def get_all_dmp_issues(async_session): + try: + async with async_session() as session: + # Alias for the DmpOrg table to use in the JSON_BUILD_OBJECT + dmp_org_alias = aliased(DmpOrg) + + # Build the query + query = ( + select( + DmpIssue, + func.json_build_object( + 'created_at', dmp_org_alias.created_at, + 'description', dmp_org_alias.description, + 'id', dmp_org_alias.id, + 'link', dmp_org_alias.link, + 'name', dmp_org_alias.name, + 'repo_owner', dmp_org_alias.repo_owner + ).label('dmp_orgs') + ) + .outerjoin(dmp_org_alias, DmpIssue.org_id == dmp_org_alias.id) + .filter(DmpIssue.org_id.isnot(None)) + .order_by(DmpIssue.id) + ) + + # Execute the query and fetch results + result = await 
session.execute(query) + rows = result.fetchall() + + # Convert results to dictionaries + data = [] + for row in rows: + issue_dict = row._asdict() # Convert row to dict + dmp_orgs = issue_dict.pop('dmp_orgs') # Extract JSON object from row + issue_dict['dmp_orgs'] = dmp_orgs + issue_dict.update(issue_dict['DmpIssue'].to_dict()) + # Add JSON object back to dict + del issue_dict['DmpIssue'] + data.append(issue_dict) + + return data + + except Exception as e: + print(e) + raise Exception + + async def update_dmp_issue(async_session,issue_id: int, update_data: dict): + try: + async with async_session() as session: + async with session.begin(): + # Build the update query + query = ( + update(DmpIssue) + .where(DmpIssue.id == issue_id) + .values(**update_data) + ) + + # Execute the query + await session.execute(query) + await session.commit() + return True + + except Exception as e: + return False + + + async def upsert_data_orm(async_session, update_data): + try: + + async with async_session() as session: + async with session.begin(): + + # Define the insert statement + stmt = insert(DmpIssueUpdate).values(**update_data) + + # Define the update statement in case of conflict + stmt = stmt.on_conflict_do_update( + index_elements=['comment_id'], + set_={ + 'body_text': stmt.excluded.body_text, + 'comment_link': stmt.excluded.comment_link, + 'comment_api': stmt.excluded.comment_api, + 'comment_updated_at': stmt.excluded.comment_updated_at, + 'dmp_id': stmt.excluded.dmp_id, + 'created_by': stmt.excluded.created_by, + 'created_at': stmt.excluded.created_at + } + ) + + # Execute the statement + await session.execute(stmt) + await session.commit() + + return True + + except Exception as e: + print(e) + return False + + + + async def upsert_pr_update(async_session, pr_update_data): + try: + async with async_session() as session: + async with session.begin(): + pr_update_data['pr_updated_at'] = datetime.fromisoformat(pr_update_data['pr_updated_at']).replace(tzinfo=None) if pr_update_data['pr_updated_at'] else None + pr_update_data['merged_at'] = datetime.fromisoformat(pr_update_data['merged_at']).replace(tzinfo=None) if pr_update_data['merged_at'] else None + pr_update_data['closed_at'] = datetime.fromisoformat(pr_update_data['closed_at']).replace(tzinfo=None) if pr_update_data['closed_at'] else None + + # Prepare the insert statement + stmt = insert(Prupdates).values(**pr_update_data) + + # Prepare the conflict resolution strategy + stmt = stmt.on_conflict_do_update( + index_elements=['pr_id'], # Assuming `pr_id` is the unique key + set_={ + 'status': stmt.excluded.status, + 'merged_at': stmt.excluded.merged_at, + 'closed_at': stmt.excluded.closed_at, + 'pr_updated_at': stmt.excluded.pr_updated_at, + 'dmp_id': stmt.excluded.dmp_id, + 'created_at': stmt.excluded.created_at, + 'title': stmt.excluded.title, + 'link': stmt.excluded.link + } + ) + # Execute and commit the transaction + await session.execute(stmt) + await session.commit() + + return True + + except Exception as e: + print(e) + return False + + + + async def update_dmp_week_update(async_session, update_data): + try: + async with async_session() as session: + async with session.begin(): + # Define the filter conditions + stmt = ( + select(DmpWeekUpdate) + .where( + DmpWeekUpdate.week == update_data['week'], + DmpWeekUpdate.dmp_id == update_data['dmp_id'] + ) + ) + + # Fetch the row that needs to be updated + result = await session.execute(stmt) + dmp_week_update = result.scalars().first() + + if dmp_week_update: + # Update the fields with the 
values from update_data + for key, value in update_data.items(): + setattr(dmp_week_update, key, value) + + # Commit the changes + await session.commit() + return True + except Exception as e: + print(e) + return False + + + + async def get_week_updates(async_session, dmp_id, week): + try: + async with async_session() as session: + # Build the ORM query + stmt = select(DmpWeekUpdate).where( + DmpWeekUpdate.dmp_id == dmp_id, + DmpWeekUpdate.week == week + ) + # Execute the query + result = await session.execute(stmt) + + # Fetch all matching rows + week_updates = result.scalars().all() + + + return True if len(week_updates)>0 else False + + except Exception as e: + return False + + + + async def insert_dmp_week_update(async_session, update_data): + try: + async with async_session() as session: + async with session.begin(): + # Define the insert statement + stmt = insert(DmpWeekUpdate).values(**update_data) + + # Execute the statement + await session.execute(stmt) + await session.commit() + + return True + + except Exception as e: + print(e) + return False + + diff --git a/db/models.py b/db/models.py index e69de29..58268a6 100644 --- a/db/models.py +++ b/db/models.py @@ -0,0 +1,1721 @@ +from datetime import datetime +from sqlalchemy.orm import relationship +from sqlalchemy import UUID, Boolean, Float, MetaData, Column, Integer, SmallInteger, String, Text, DateTime, ForeignKey, BigInteger, TypeDecorator, UniqueConstraint, func +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.types import TypeDecorator, DateTime as SA_DateTime + +Base = declarative_base() +# Shared metadata object +shared_metadata = MetaData() + +class DmpOrg(Base): + __tablename__ = 'dmp_orgs' + + id = Column(Integer, primary_key=True, autoincrement=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + name = Column(String, nullable=False) + description = Column(Text, nullable=True) + link = Column(String, nullable=False) + repo_owner = Column(String, nullable=False) + + # Relationship to DmpIssueUpdate + issues = relationship('DmpIssueUpdate', backref='organization', lazy=True) + + # Updated relationship name to avoid conflict + dmp_issues = relationship('DmpIssue', backref='organization', lazy=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'created_at': self.created_at.isoformat(), + 'name': self.name, + 'description': self.description, + 'link': self.link, + 'repo_owner': self.repo_owner + } + +class DmpIssue(Base): + __tablename__ = 'dmp_issues' + + id = Column(Integer, primary_key=True, autoincrement=True) + issue_url = Column(String, nullable=False) + issue_number = Column(Integer, nullable=False) + mentor_username = Column(String, nullable=True) + contributor_username = Column(String, nullable=True) + title = Column(String, nullable=False) + org_id = Column(Integer, ForeignKey('dmp_orgs.id'), nullable=False) + description = Column(Text, nullable=True) + repo_owner = Column(Text, nullable=True) + repo = Column(String, nullable=True) + + + # Relationship to Prupdates + pr_updates = relationship('Prupdates', backref='pr_details', lazy=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'issue_url': self.issue_url, + 'issue_number': self.issue_number, + 'mentor_username': self.mentor_username, + 'contributor_username': self.contributor_username, + 'title': self.title, + 'org_id': self.org_id, + 'description': self.description, + 'repo': self.repo, + 'repo_owner': self.repo_owner + 
} + +class DmpIssueUpdate(Base): + __tablename__ = 'dmp_issue_updates' + + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + body_text = Column(Text, nullable=False) + comment_link = Column(String, nullable=False) + comment_id = Column(BigInteger, primary_key=True, nullable=False) + comment_api = Column(String, nullable=False) + comment_updated_at = Column(DateTime, nullable=False) + dmp_id = Column(Integer, ForeignKey('dmp_orgs.id'), nullable=False) + created_by = Column(String, nullable=False) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'created_at': self.created_at.isoformat(), + 'body_text': self.body_text, + 'comment_link': self.comment_link, + 'comment_id': self.comment_id, + 'comment_api': self.comment_api, + 'comment_updated_at': self.comment_updated_at.isoformat(), + 'dmp_id': self.dmp_id, + 'created_by': self.created_by + } + +class Prupdates(Base): + __tablename__ = 'dmp_pr_updates' + + created_at = Column(DateTime, nullable=False, default=datetime.utcnow) + pr_id = Column(Integer, nullable=False,primary_key=True) + status = Column(String, nullable=False) + title = Column(String, nullable=False) + pr_updated_at = Column(DateTime, nullable=False, default=datetime.utcnow) + merged_at = Column(DateTime) + closed_at = Column(DateTime) + dmp_id = Column(Integer, ForeignKey('dmp_issues.id'), nullable=False) # ForeignKey relationship + link = Column(String, nullable=False) + + def __repr__(self): + return f'' + + def to_dict(self): + return { + 'created_at': self.created_at.isoformat(), + 'pr_id': self.pr_id, + 'status': self.status, + 'title': self.title, + 'pr_updated_at': self.pr_updated_at.isoformat(), + 'merged_at': self.merged_at.isoformat() if self.merged_at else None, + 'closed_at': self.closed_at.isoformat() if self.closed_at else None, + 'dmp_id': self.dmp_id, + 'link': self.link + } + +class DmpWeekUpdate(Base): + __tablename__ = 'dmp_week_updates' + + id = Column(Integer, primary_key=True, autoincrement=True) + issue_url = Column(String, nullable=False) + week = Column(Integer, nullable=False) + total_task = Column(Integer, nullable=False) + completed_task = Column(Integer, nullable=False) + progress = Column(Integer, nullable=False) + task_data = Column(Text, nullable=False) + dmp_id = Column(Integer, nullable=False) + + def __repr__(self): + return f"" + + + +class DateTime(TypeDecorator): + impl = SA_DateTime + + def process_bind_param(self, value, dialect): + if isinstance(value, str): + try: + # Convert string to datetime + return datetime.fromisoformat(value) + except ValueError: + # If conversion fails, return None + return None + return value + + def process_result_value(self, value, dialect): + return value + + +class AppComments(Base): + __tablename__ = 'app_comments' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + updated_at = Column(DateTime, nullable=True) + api_url = Column(Text, nullable=True) + comment_id = Column(BigInteger, nullable=True) + issue_id = Column(BigInteger, unique=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': str(self.id), + 'updated_at': self.updated_at, + 'api_url': self.api_url, + 'comment_id': self.comment_id, + 'issue_id': self.issue_id + } + +class Badges(Base): + __tablename__ = 'badges' + id = Column(UUID(as_uuid=True), primary_key=True) + image = Column(Text, nullable=True) + text = Column(Text, nullable=True) + description = Column(Text, nullable=True) + created_at = Column(DateTime, nullable=True) + updated_at = 
Column(DateTime, nullable=True) + + user_badges = relationship('UserBadges', back_populates='badge') + + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'image': self.image, + 'text': self.text, + 'description': self.description, + 'created_at': self.created_at, + 'updated_at': self.updated_at + } + +class CcbpTickets(Base): + __tablename__ = 'ccbp_tickets' + __table_args__ = {'comment': 'A table to store details of CCBP Tickets from various projects'} + + created_at = Column(DateTime, nullable=True) + name = Column(Text, nullable=True) + product = Column(Text, nullable=True) + complexity = Column(Text, nullable=True) + project_category = Column(Text, nullable=True) + project_sub_category = Column(Text, nullable=True) + reqd_skills = Column(Text, nullable=True) + issue_id = Column(BigInteger, unique=True) + api_endpoint_url = Column(Text, unique=True, nullable=True) + url = Column(Text, unique=True, nullable=True) + ticket_points = Column(SmallInteger, nullable=True, comment='How many points the ticket is worth') + index = Column(SmallInteger, unique=True, autoincrement=True) + mentors = Column(Text, nullable=True) + uuid = Column(UUID(as_uuid=True), primary_key=True) + status = Column(Text, nullable=True) + community_label = Column(Boolean, nullable=True, comment='has community label') + organization = Column(Text, nullable=True) + closed_at = Column(DateTime, nullable=True, comment='date-time at which issue was closed') + assignees = Column(Text, nullable=True) + issue_author = Column(Text, nullable=True) + is_assigned = Column(Boolean, nullable=False) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'created_at': self.created_at, + 'name': self.name, + 'product': self.product, + 'complexity': self.complexity, + 'project_category': self.project_category, + 'project_sub_category': self.project_sub_category, + 'reqd_skills': self.reqd_skills, + 'issue_id': self.issue_id, + 'api_endpoint_url': self.api_endpoint_url, + 'url': self.url, + 'ticket_points': self.ticket_points, + 'index': self.index, + 'mentors': self.mentors, + 'uuid': str(self.uuid), + 'status': self.status, + 'community_label': self.community_label, + 'organization': self.organization, + 'closed_at': self.closed_at, + 'assignees': self.assignees, + 'issue_author': self.issue_author, + 'is_assigned': self.is_assigned + } + +class Chapters(Base): + __tablename__ = 'chapters' + + id = Column(UUID(as_uuid=True), primary_key=True) + type = Column(Text, nullable=True) + org_name = Column(Text, unique=True) + primary_organisation = Column(Text, nullable=True, comment='the organisation that the chapter is mapped to') + sessions = Column(Integer, nullable=True) + discord_role_id = Column(BigInteger, unique=True, comment='db id of the corresponding member role in discord server') + created_at = Column(DateTime, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'type': self.type, + 'org_name': self.org_name, + 'primary_organisation': self.primary_organisation, + 'sessions': self.sessions, + 'discord_role_id': self.discord_role_id, + 'created_at': self.created_at + } + + +## + +class ConnectedPrs(Base): + __tablename__ = 'connected_prs' + + id = Column(UUID(as_uuid=True), primary_key=True) + created_at = Column(DateTime, nullable=True) + api_url = Column(Text, nullable=True) + html_url = Column(Text, unique=True, nullable=True) + raised_by = Column(BigInteger, nullable=True) + raised_at = Column(DateTime, nullable=False) + 
raised_by_username = Column(Text, nullable=False) + status = Column(Text, nullable=True) + is_merged = Column(Boolean, nullable=True) + merged_by = Column(BigInteger, nullable=True) + merged_at = Column(Text, nullable=True) + merged_by_username = Column(Text, nullable=True) + pr_id = Column(BigInteger, nullable=False, comment='github id of the pr') + points = Column(SmallInteger, nullable=False) + ticket_url = Column(Text, nullable=False) + ticket_complexity = Column(Text, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'created_at': self.created_at, + 'api_url': self.api_url, + 'html_url': self.html_url, + 'raised_by': self.raised_by, + 'raised_at': self.raised_at, + 'raised_by_username': self.raised_by_username, + 'status': self.status, + 'is_merged': self.is_merged, + 'merged_by': self.merged_by, + 'merged_at': self.merged_at, + 'merged_by_username': self.merged_by_username, + 'pr_id': self.pr_id, + 'points': self.points, + 'ticket_url': self.ticket_url, + 'ticket_complexity': self.ticket_complexity + } + +class ContributorNames(Base): + __tablename__ = 'contributor_names' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + discord_id = Column(BigInteger, nullable=False) + name = Column(Text, nullable=True) + country = Column(Text, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'discord_id': self.discord_id, + 'name': self.name, + 'country': self.country + } + +class ContributorsDiscord(Base): + __tablename__ = 'contributors_discord' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + discord_id = Column(BigInteger, unique=True, nullable=False) + github_id = Column(BigInteger, nullable=True) + github_url = Column(String, nullable=True) + discord_username = Column(String, nullable=True) + joined_at = Column(DateTime, nullable=False) + email = Column(Text, nullable=True) + field_name = Column(Text, nullable=True, name='name') # Adjusted field name + chapter = Column(Text, nullable=True, comment="the chapter they're associated with") + gender = Column(Text, nullable=True) + is_active = Column(Boolean, nullable=False) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'discord_id': self.discord_id, + 'github_id': self.github_id, + 'github_url': self.github_url, + 'discord_username': self.discord_username, + 'joined_at': self.joined_at, + 'email': self.email, + 'name': self.field_name, + 'chapter': self.chapter, + 'gender': self.gender, + 'is_active': self.is_active + } + +class ContributorsRegistration(Base): + __tablename__ = 'contributors_registration' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + discord_id = Column(BigInteger, unique=True, nullable=False) + github_id = Column(BigInteger, unique=True, nullable=False) + github_url = Column(String, nullable=False) + discord_username = Column(String, nullable=True) + joined_at = Column(DateTime, nullable=False) + email = Column(Text, nullable=True) + name = Column(Text, nullable=True) + + point_transactions = relationship('PointTransactions', back_populates='contributor') + + user_activities = relationship('UserActivity', back_populates='contributor') + user_points_mappings = relationship('UserPointsMapping', back_populates='contributors') + + + def __repr__(self): + return f"" + + + def to_dict(self): + return { + 'id': self.id, + 'discord_id': self.discord_id, + 'github_id': self.github_id, + 'github_url': self.github_url, + 
'discord_username': self.discord_username, + 'joined_at': self.joined_at, + 'email': self.email, + 'name': self.name + } + +class DiscordChannels(Base): + __tablename__ = 'discord_channels' + + channel_id = Column(BigInteger, primary_key=True) + channel_name = Column(Text, nullable=True) + webhook = Column(Text, nullable=True) + should_notify = Column(Boolean, nullable=False) + + products = relationship('Product', back_populates='channel') + + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'channel_id': self.channel_id, + 'channel_name': self.channel_name, + 'webhook': self.webhook, + 'should_notify': self.should_notify + } + +class DiscordEngagement(Base): + __tablename__ = 'discord_engagement' + __table_args__ = {'comment': 'engagement metrics for contributors'} + + id = Column(BigInteger, primary_key=True, autoincrement=True) + created_at = Column(DateTime, nullable=True) + contributor = Column(BigInteger, unique=True, nullable=False) + has_introduced = Column(Boolean, nullable=True) + total_message_count = Column(BigInteger, nullable=True) + total_reaction_count = Column(BigInteger, nullable=True) + converserbadge = Column(Boolean, nullable=True) + apprenticebadge = Column(Boolean, nullable=True) + rockstarbadge = Column(Boolean, nullable=True) + enthusiastbadge = Column(Boolean, nullable=True) + risingstarbadge = Column(Boolean, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'created_at': self.created_at, + 'contributor': self.contributor, + 'has_introduced': self.has_introduced, + 'total_message_count': self.total_message_count, + 'total_reaction_count': self.total_reaction_count, + 'converserbadge': self.converserbadge, + 'apprenticebadge': self.apprenticebadge, + 'rockstarbadge': self.rockstarbadge, + 'enthusiastbadge': self.enthusiastbadge, + 'risingstarbadge': self.risingstarbadge + } + +class DmpIssueUpdates(Base): + __tablename__ = 'dmp_issue_updates' + __table_args__ = {'comment': 'Having records of dmp with issue details'} + + created_at = Column(DateTime, nullable=False) + body_text = Column(Text, nullable=True) + comment_link = Column(Text, nullable=True) + comment_id = Column(BigInteger, primary_key=True) + comment_api = Column(String, nullable=True) + comment_updated_at = Column(DateTime, nullable=True) + dmp_id = Column(BigInteger, ForeignKey('dmp_issues.id'), nullable=False) + created_by = Column(Text, nullable=False) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'created_at': self.created_at, + 'body_text': self.body_text, + 'comment_link': self.comment_link, + 'comment_id': self.comment_id, + 'comment_api': self.comment_api, + 'comment_updated_at': self.comment_updated_at, + 'dmp_id': self.dmp_id, + 'created_by': self.created_by + } + + +class DmpIssues(Base): + __tablename__ = 'dmp_issues' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + issue_url = Column(String, nullable=False) + issue_number = Column(BigInteger, nullable=False) + mentor_username = Column(Text, nullable=True) + contributor_username = Column(Text, nullable=True) + title = Column(Text, nullable=False) + org_id = Column(BigInteger, ForeignKey('dmp_orgs.id'), nullable=False) + description = Column(Text, nullable=False) + repo = Column(Text, nullable=False) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'issue_url': self.issue_url, + 'issue_number': self.issue_number, + 'mentor_username': self.mentor_username, + 'contributor_username': 
self.contributor_username, + 'title': self.title, + 'org_id': self.org_id, + 'description': self.description, + 'repo': self.repo + } + +class DmpOrgs(Base): + __tablename__ = 'dmp_orgs' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + created_at = Column(DateTime, nullable=False) + name = Column(Text, nullable=False) + description = Column(Text, nullable=False) + link = Column(Text, nullable=False) + repo_owner = Column(Text, nullable=False) + + # issues = relationship('Issues', backref='organization', lazy='joined') + + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'created_at': self.created_at, + 'name': self.name, + 'description': self.description, + 'link': self.link, + 'repo_owner': self.repo_owner + } + +class DmpPrUpdates(Base): + __tablename__ = 'dmp_pr_updates' + __table_args__ = {'comment': 'Having PR related records'} + + created_at = Column(DateTime, nullable=False) + pr_id = Column(BigInteger, primary_key=True) + status = Column(String, nullable=False) + title = Column(Text, nullable=False) + pr_updated_at = Column(DateTime, nullable=True) + merged_at = Column(DateTime, nullable=True) + closed_at = Column(DateTime, nullable=True) + dmp_id = Column(BigInteger, ForeignKey('dmp_issues.id'), nullable=False) + link = Column(Text, nullable=False) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'created_at': self.created_at, + 'pr_id': self.pr_id, + 'status': self.status, + 'title': self.title, + 'pr_updated_at': self.pr_updated_at, + 'merged_at': self.merged_at, + 'closed_at': self.closed_at, + 'dmp_id': self.dmp_id, + 'link': self.link + } + +class DmpTickets(Base): + __tablename__ = 'dmp_tickets' + + created_at = Column(DateTime, nullable=True) + name = Column(Text, nullable=True) + product = Column(Text, nullable=True) + complexity = Column(Text, nullable=True) + project_category = Column(Text, nullable=True) + project_sub_category = Column(Text, nullable=True) + reqd_skills = Column(Text, nullable=True) + issue_id = Column(BigInteger, unique=True, nullable=False) + api_endpoint_url = Column(Text, unique=True, nullable=True) + url = Column(Text, unique=True, nullable=True) + ticket_points = Column(Integer, nullable=True, comment='How many points the ticket is worth') + index = Column(Integer, unique=True, autoincrement=True) + mentors = Column(Text, nullable=True) + uuid = Column(UUID(as_uuid=True), primary_key=True) + status = Column(Text, nullable=True) + community_label = Column(Boolean, nullable=True, comment='has community label') + organization = Column(Text, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'created_at': self.created_at, + 'name': self.name, + 'product': self.product, + 'complexity': self.complexity, + 'project_category': self.project_category, + 'project_sub_category': self.project_sub_category, + 'reqd_skills': self.reqd_skills, + 'issue_id': self.issue_id, + 'api_endpoint_url': self.api_endpoint_url, + 'url': self.url, + 'ticket_points': self.ticket_points, + 'index': self.index, + 'mentors': self.mentors, + 'uuid': self.uuid, + 'status': self.status, + 'community_label': self.community_label, + 'organization': self.organization + } + +class DmpWeekUpdates(Base): + __tablename__ = 'dmp_week_updates' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + issue_url = Column(Text, nullable=False) + week = Column(BigInteger, nullable=True) + total_task = Column(BigInteger, nullable=True) + completed_task = 
Column(BigInteger, nullable=True) + progress = Column(Float, nullable=True) + task_data = Column(Text, nullable=True) + dmp_id = Column(BigInteger, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'issue_url': self.issue_url, + 'week': self.week, + 'total_task': self.total_task, + 'completed_task': self.completed_task, + 'progress': self.progress, + 'task_data': self.task_data, + 'dmp_id': self.dmp_id + } + +class GithubClassroomData(Base): + __tablename__ = 'github_classroom_data' + __table_args__ = {'comment': 'Table for saving the details about github classroom assignment data'} + + id = Column(BigInteger, primary_key=True, autoincrement=True) + created_at = Column(DateTime, nullable=False) + assignment_name = Column(Text, nullable=False) + assignment_url = Column(Text, nullable=False) + assignment_id = Column(Text, nullable=True) + starter_code_url = Column(Text, nullable=False) + github_username = Column(Text, nullable=True) + roster_identifier = Column(Text, nullable=True) + student_repository_name = Column(Text, nullable=True) + student_repository_url = Column(Text, nullable=True) + submission_timestamp = Column(DateTime, nullable=False) + points_awarded = Column(Integer, nullable=True) + points_available = Column(Integer, nullable=True) + c4gt_points = Column(Integer, nullable=True) + discord_id = Column(Text, nullable=True) + updated_at = Column(DateTime, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'created_at': self.created_at, + 'assignment_name': self.assignment_name, + 'assignment_url': self.assignment_url, + 'assignment_id': self.assignment_id, + 'starter_code_url': self.starter_code_url, + 'github_username': self.github_username, + 'roster_identifier': self.roster_identifier, + 'student_repository_name': self.student_repository_name, + 'student_repository_url': self.student_repository_url, + 'submission_timestamp': self.submission_timestamp, + 'points_awarded': self.points_awarded, + 'points_available': self.points_available, + 'c4gt_points': self.c4gt_points, + 'discord_id': self.discord_id, + 'updated_at': self.updated_at + } + +class GithubInstallations(Base): + __tablename__ = 'github_installations' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + github_organisation = Column(Text, unique=True, nullable=False) + installation_id = Column(BigInteger, unique=True, nullable=False) + target_type = Column(Text, nullable=True, comment='Type of github entity that installed the app, usually "Organisation"') + github_ids = Column(Text, nullable=True, comment="Identifiers on the github database, prolly won't be used") + permissions_and_events = Column(Text, nullable=True) + created_at = Column(DateTime, nullable=True) + organisation = Column(Text, ForeignKey('community_organisations.name'), nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'github_organisation': self.github_organisation, + 'installation_id': self.installation_id, + 'target_type': self.target_type, + 'github_ids': self.github_ids, + 'permissions_and_events': self.permissions_and_events, + 'created_at': self.created_at, + 'organisation': self.organisation + } +## + +class GithubOrganisationsToOrganisations(Base): + __tablename__ = 'github_organisations_to_organisations' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + github_organisation = Column(Text, nullable=False) + organisation = Column(Text, 
nullable=True)
+    created_at = Column(DateTime, nullable=True, comment='Creation date of organization ticket')
+
+    def __repr__(self):
+        return f""
+
+    def to_dict(self):
+        return {
+            'id': self.id,
+            'github_organisation': self.github_organisation,
+            'organisation': self.organisation,
+            'created_at': self.created_at
+        }
+
+class IssueContributors(Base):
+    __tablename__ = 'issue_contributors'
+
+    id = Column(BigInteger, primary_key=True, autoincrement=True)
+    contributor_id = Column(BigInteger, ForeignKey('contributors_registration.id'))
+    issue_id = Column(BigInteger, ForeignKey('issues.id'), primary_key=True)
+    role = Column(BigInteger, ForeignKey('role_master.id'), nullable=True)
+    created_at = Column(DateTime, nullable=True)
+    updated_at = Column(DateTime, nullable=True)
+
+    def __repr__(self):
+        return f""
+
+    def to_dict(self):
+        return {
+            'contributor_id': self.contributor_id,
+            'issue_id': self.issue_id,
+            'role_id': self.role,
+            'created_at': self.created_at,
+            'updated_at': self.updated_at
+        }
+
+class IssueMentors(Base):
+    __tablename__ = 'issue_mentors'
+
+    id = Column(BigInteger, primary_key=True, autoincrement=True)
+    issue_id = Column(BigInteger, ForeignKey('issues.id'))
+    org_mentor_id = Column(Text, nullable=True)
+    angel_mentor_id = Column(BigInteger, ForeignKey('contributors_registration.id'))
+    created_at = Column(DateTime, nullable=True)
+    updated_at = Column(DateTime, nullable=True)
+
+    def __repr__(self):
+        return f""
+
+    def to_dict(self):
+        return {
+            'issue_id': self.issue_id,
+            'org_mentor_id': self.org_mentor_id,
+            'angel_mentor_id': self.angel_mentor_id,
+            'created_at': self.created_at,
+            'updated_at': self.updated_at
+        }
+
+class Issues(Base):
+    __tablename__ = 'issues'
+
+    id = Column(BigInteger, primary_key=True)
+    link = Column(Text, nullable=False)
+    labels = Column(Text, nullable=True)
+    project_type = Column(Text, nullable=True)
+    complexity = Column(Text, nullable=True)
+    skills = Column(Text, nullable=True)
+    technology = Column(Text, nullable=True)
+    status = Column(Text, nullable=True)
+    created_at = Column(DateTime, nullable=True)
+    updated_at = Column(DateTime, nullable=True)
+    title = Column(Text, nullable=True)
+    domain = Column(Text, nullable=True)
+    description = Column(Text, nullable=True)
+    org_id = Column(BigInteger, ForeignKey('community_orgs.id'), nullable=True)
+    issue_id = Column(BigInteger, unique=True)
+
+    point_transactions = relationship('PointTransactions', back_populates='issue')
+    user_activities = relationship('UserActivity', back_populates='issue')
+
+    def __repr__(self):
+        return f""
+
+    def to_dict(self):
+        return {
+            'id': self.id,
+            'link': self.link,
+            'labels': self.labels,
+            'complexity': self.complexity,
+            'skills': self.skills,
+            'technology': self.technology,
+            'status': self.status,
+            'created_at': self.created_at,
+            'updated_at': self.updated_at,
+            'title': self.title,
+            'description': self.description,
+            'org_id': self.org_id,
+            'issue_id': self.issue_id,
+            'project_type': self.project_type,
+            'domain': self.domain
+        }
+
+class MentorDetails(Base):
+    __tablename__ = 'mentor_details'
+
+    id = Column(BigInteger, primary_key=True)
+    name = Column(String(255), nullable=True)
+    email = Column(String(255), nullable=True)
+    discord_id = Column(String(255), nullable=True)
+    discord_username = Column(String(255), nullable=True)
+    github_id = Column(String(255), nullable=True)
+    created_at = Column(DateTime, nullable=True)
+    updated_at = Column(DateTime, nullable=True)
+
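+    # Reverse side of the foreign keys declared on PointTransactions,
+    # UserActivity, and UserPointsMapping; back_populates keeps both ends of
+    # each relationship in sync within a session.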
point_transactions = relationship('PointTransactions', back_populates='mentor') + user_activities = relationship('UserActivity', back_populates='mentor') + user_points_mappings = relationship('UserPointsMapping', back_populates='mentor') + + + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'name': self.name, + 'email': self.email, + 'discord_id': self.discord_id, + 'discord_username': self.discord_username, + 'github_id': self.github_id, + 'created_at': self.created_at, + 'updated_at': self.updated_at + } + +class MentorshipProgramSiteStructure(Base): + __tablename__ = 'mentorship_program_site_structure' + + id = Column(BigInteger, primary_key=True) + product_id = Column(BigInteger, ForeignKey('product.id'), nullable=True) + project_id = Column(BigInteger, ForeignKey('mentorship_program_projects.id'), nullable=True) + contributor_id = Column(BigInteger, ForeignKey('mentorship_program_selected_contributors.id'), nullable=True) + website_directory_label = Column(Text, nullable=True) + directory_url = Column(Text, nullable=True) + + # project = relationship('MentorshipProgramProjects', back_populates='site_structures') + # contributor = relationship('MentorshipProgramSelectedContributors', back_populates='site_structures') + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'product_id': self.product_id, + 'project_id': self.project_id, + 'contributor_id': self.contributor_id, + 'website_directory_label': self.website_directory_label, + 'directory_url': self.directory_url + } + +class MentorshipProgramWebsiteComments(Base): + __tablename__ = 'mentorship_program_website_comments' + + comment_id = Column(BigInteger, primary_key=True) + url = Column(Text, nullable=True) + html_url = Column(Text, nullable=True) + commented_by_username = Column(Text, nullable=True) + commented_by_id = Column(BigInteger, nullable=True) + created_at = Column(DateTime, nullable=True) + updated_at = Column(DateTime, nullable=True) + body = Column(Text, nullable=True) + pr_id = Column(BigInteger, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'comment_id': self.comment_id, + 'url': self.url, + 'html_url': self.html_url, + 'commented_by_username': self.commented_by_username, + 'commented_by_id': self.commented_by_id, + 'created_at': self.created_at, + 'updated_at': self.updated_at, + 'body': self.body, + 'pr_id': self.pr_id + } + +class MentorshipProgramWebsiteCommits(Base): + __tablename__ = 'mentorship_program_website_commits' + + node_id = Column(Text, primary_key=True) + url = Column(Text, nullable=True) + html_url = Column(Text, nullable=True) + comment_count = Column(Integer, nullable=True) + date = Column(DateTime, nullable=True) + author_id = Column(BigInteger, nullable=True) + author_username = Column(Text, nullable=True) + author_email = Column(Text, nullable=True) + committer_id = Column(BigInteger, nullable=True) + committer_username = Column(Text, nullable=True) + committer_email = Column(Text, nullable=True) + additions = Column(Integer, nullable=True) + deletions = Column(Integer, nullable=True) + files = Column(Text, nullable=True) + project_folder_name = Column(Text, nullable=True) + pr_id = Column(BigInteger, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'node_id': self.node_id, + 'url': self.url, + 'html_url': self.html_url, + 'comment_count': self.comment_count, + 'date': self.date, + 'author_id': self.author_id, + 'author_username': 
self.author_username, + 'author_email': self.author_email, + 'committer_id': self.committer_id, + 'committer_username': self.committer_username, + 'committer_email': self.committer_email, + 'additions': self.additions, + 'deletions': self.deletions, + 'files': self.files, + 'project_folder_name': self.project_folder_name, + 'pr_id': self.pr_id + } + +class MentorshipProgramWebsiteHasUpdated(Base): + __tablename__ = 'mentorship_program_website_has_updated' + + id = Column(BigInteger, primary_key=True) + project_id = Column(BigInteger, ForeignKey('mentorship_program_projects.id'), nullable=True) + week1_update_date = Column(DateTime, nullable=True) + week2_update_date = Column(DateTime, nullable=True) + week3_update_date = Column(DateTime, nullable=True) + week4_update_date = Column(DateTime, nullable=True) + week5_update_date = Column(DateTime, nullable=True) + week6_update_date = Column(DateTime, nullable=True) + week7_update_date = Column(DateTime, nullable=True) + week8_update_date = Column(DateTime, nullable=True) + week9_update_date = Column(DateTime, nullable=True) + week1_is_default_text = Column(Boolean, nullable=True) + week2_is_default_text = Column(Boolean, nullable=True) + week3_is_default_text = Column(Boolean, nullable=True) + week4_is_default_text = Column(Boolean, nullable=True) + week5_is_default_text = Column(Boolean, nullable=True) + week6_is_default_text = Column(Boolean, nullable=True) + week7_is_default_text = Column(Boolean, nullable=True) + week8_is_default_text = Column(Boolean, nullable=True) + week9_is_default_text = Column(Boolean, nullable=True) + product = Column(Text, nullable=True) + project_folder = Column(Text, unique=True, nullable=False) + all_links = Column(Text, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'project_id': self.project_id, + 'week1_update_date': self.week1_update_date, + 'week2_update_date': self.week2_update_date, + 'week3_update_date': self.week3_update_date, + 'week4_update_date': self.week4_update_date, + 'week5_update_date': self.week5_update_date, + 'week6_update_date': self.week6_update_date, + 'week7_update_date': self.week7_update_date, + 'week8_update_date': self.week8_update_date, + 'week9_update_date': self.week9_update_date, + 'week1_is_default_text': self.week1_is_default_text, + 'week2_is_default_text': self.week2_is_default_text, + 'week3_is_default_text': self.week3_is_default_text, + 'week4_is_default_text': self.week4_is_default_text, + 'week5_is_default_text': self.week5_is_default_text, + 'week6_is_default_text': self.week6_is_default_text, + 'week7_is_default_text': self.week7_is_default_text, + 'week8_is_default_text': self.week8_is_default_text, + 'week9_is_default_text': self.week9_is_default_text, + 'product': self.product, + 'project_folder': self.project_folder, + 'all_links': self.all_links + } + + + +## + +class MentorshipProgramWebsitePullRequest(Base): + __tablename__ = 'mentorship_program_website_pull_request' + + pr_url = Column(Text, nullable=True) + pr_id = Column(BigInteger, primary_key=True) + pr_node_id = Column(Text, unique=True, nullable=True) + html_url = Column(Text, nullable=True) + status = Column(Text, nullable=True) + title = Column(Text, nullable=True) + raised_by_username = Column(Text, nullable=True) + raised_by_id = Column(Integer, nullable=True) + body = Column(Text, nullable=True) + created_at = Column(DateTime, nullable=True) + updated_at = Column(DateTime, nullable=True) + closed_at = Column(DateTime, nullable=True) + 
merged_at = Column(DateTime, nullable=True) + assignees = Column(Text, nullable=True) + requested_reviewers = Column(Text, nullable=True) + labels = Column(Text, nullable=True) + review_comments_url = Column(Text, nullable=True) + comments_url = Column(Text, nullable=True) + repository_id = Column(Integer, nullable=True) + repository_owner_name = Column(Text, nullable=True) + repository_owner_id = Column(Integer, nullable=True) + repository_url = Column(Text, nullable=True) + merged = Column(Boolean, nullable=True) + number_of_commits = Column(Integer, nullable=True) + number_of_comments = Column(Integer, nullable=True) + lines_of_code_added = Column(Integer, nullable=True) + lines_of_code_removed = Column(Integer, nullable=True) + number_of_files_changed = Column(Integer, nullable=True) + merged_by_id = Column(BigInteger, nullable=True) + merged_by_username = Column(Text, nullable=True) + linked_ticket = Column(Text, nullable=True) + project_name = Column(Text, nullable=True) + project_folder_label = Column(Text, nullable=True) + week_number = Column(SmallInteger, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'pr_url': self.pr_url, + 'pr_id': self.pr_id, + 'pr_node_id': self.pr_node_id, + 'html_url': self.html_url, + 'status': self.status, + 'title': self.title, + 'raised_by_username': self.raised_by_username, + 'raised_by_id': self.raised_by_id, + 'body': self.body, + 'created_at': self.created_at, + 'updated_at': self.updated_at, + 'closed_at': self.closed_at, + 'merged_at': self.merged_at, + 'assignees': self.assignees, + 'requested_reviewers': self.requested_reviewers, + 'labels': self.labels, + 'review_comments_url': self.review_comments_url, + 'comments_url': self.comments_url, + 'repository_id': self.repository_id, + 'repository_owner_name': self.repository_owner_name, + 'repository_owner_id': self.repository_owner_id, + 'repository_url': self.repository_url, + 'merged': self.merged, + 'number_of_commits': self.number_of_commits, + 'number_of_comments': self.number_of_comments, + 'lines_of_code_added': self.lines_of_code_added, + 'lines_of_code_removed': self.lines_of_code_removed, + 'number_of_files_changed': self.number_of_files_changed, + 'merged_by_id': self.merged_by_id, + 'merged_by_username': self.merged_by_username, + 'linked_ticket': self.linked_ticket, + 'project_name': self.project_name, + 'project_folder_label': self.project_folder_label, + 'week_number': self.week_number + } + +class MentorshipWebsiteContributorProject(Base): + __tablename__ = 'mentorship_website_contributor_project' + + project_folder = Column(Text, primary_key=True) + contributor = Column(Text, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'project_folder': self.project_folder, + 'contributor': self.contributor + } + +class PointSystem(Base): + __tablename__ = 'point_system' + + id = Column(BigInteger, primary_key=True) + complexity = Column(Text, nullable=False) + points = Column(SmallInteger, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'complexity': self.complexity, + 'points': self.points + } + +class PointTransactions(Base): + __tablename__ = 'point_transactions' + + id = Column(BigInteger, primary_key=True) + user_id = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=True) + issue_id = Column(BigInteger, ForeignKey('issues.id'), nullable=False) + point = Column(Integer, nullable=True) + type = Column(Text, nullable=True) + 
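+    # Server-side timestamps: default=func.now() renders SQL NOW() on INSERT,
+    # and onupdate=func.now() refreshes updated_at whenever the ORM issues an
+    # UPDATE, so callers never have to set these columns by hand.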
created_at = Column(DateTime, default=func.now(), nullable=False) # Set to current time when created + updated_at = Column(DateTime, default=func.now(), onupdate=func.now(), nullable=False) # Updated to current time when record is modified + angel_mentor_id = Column(BigInteger, ForeignKey('mentor_details.id'), nullable=True) + + + contributor = relationship('ContributorsRegistration', back_populates='point_transactions') + issue = relationship('Issues', back_populates='point_transactions') + mentor = relationship('MentorDetails', back_populates='point_transactions') + + def __repr__(self): + return f"" + + + def to_dict(self): + return { + 'id': self.id, + 'user_id': self.user_id, + 'issue_id': self.issue_id, + 'point': self.point, + 'type': self.type, + 'created_at': self.created_at, + 'updated_at': self.updated_at, + 'angel_mentor_id': self.angel_mentor_id + } + +class PointsMapping(Base): + __tablename__ = 'points_mapping' + + id = Column(BigInteger, primary_key=True) + role = Column(String(50), nullable=False) + complexity = Column(String(50), nullable=False) + points = Column(Integer, nullable=False) + created_at = Column(DateTime, nullable=True) + updated_at = Column(DateTime, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'role': self.role, + 'complexity': self.complexity, + 'points': self.points, + 'created_at': self.created_at, + 'updated_at': self.updated_at + } + + + +### + +class PrHistory(Base): + __tablename__ = 'pr_history' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + created_at = Column(DateTime, nullable=True) + api_url = Column(Text, nullable=True) + html_url = Column(Text, unique=True, nullable=True) + raised_by = Column(BigInteger, nullable=True) + raised_at = Column(DateTime, nullable=False) + raised_by_username = Column(Text, nullable=False) + status = Column(Text, nullable=True) + is_merged = Column(Boolean, nullable=True) + merged_by = Column(BigInteger, nullable=True) + merged_at = Column(Text, nullable=True) + merged_by_username = Column(Text, nullable=True) + pr_id = Column(BigInteger, nullable=False) + ticket_url = Column(Text, nullable=False) + ticket_complexity = Column(Text, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'created_at': self.created_at, + 'api_url': self.api_url, + 'html_url': self.html_url, + 'raised_by': self.raised_by, + 'raised_at': self.raised_at, + 'raised_by_username': self.raised_by_username, + 'status': self.status, + 'is_merged': self.is_merged, + 'merged_by': self.merged_by, + 'merged_at': self.merged_at, + 'merged_by_username': self.merged_by_username, + 'pr_id': self.pr_id, + 'ticket_url': self.ticket_url, + 'ticket_complexity': self.ticket_complexity + } + +class PrStaging(Base): + __tablename__ = 'pr_staging' + + id = Column(String(36), primary_key=True) # UUID field + created_at = Column(DateTime, nullable=True) + api_url = Column(Text, nullable=True) + html_url = Column(Text, unique=True, nullable=True) + raised_by = Column(BigInteger, nullable=True) + raised_at = Column(DateTime, nullable=False) + raised_by_username = Column(Text, nullable=False) + status = Column(Text, nullable=True) + is_merged = Column(Boolean, nullable=True) + merged_by = Column(BigInteger, nullable=True) + merged_at = Column(Text, nullable=True) + merged_by_username = Column(Text, nullable=True) + pr_id = Column(BigInteger, nullable=False) + points = Column(SmallInteger, nullable=False) + ticket_url = Column(Text, 
nullable=False) + ticket_complexity = Column(Text, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'created_at': self.created_at, + 'api_url': self.api_url, + 'html_url': self.html_url, + 'raised_by': self.raised_by, + 'raised_at': self.raised_at, + 'raised_by_username': self.raised_by_username, + 'status': self.status, + 'is_merged': self.is_merged, + 'merged_by': self.merged_by, + 'merged_at': self.merged_at, + 'merged_by_username': self.merged_by_username, + 'pr_id': self.pr_id, + 'points': self.points, + 'ticket_url': self.ticket_url, + 'ticket_complexity': self.ticket_complexity + } + +class Product(Base): + __tablename__ = 'product' + + id = Column(BigInteger, primary_key=True) # Auto field + name = Column(Text, unique=True, nullable=False) + description = Column(Text, nullable=True) + wiki_url = Column(Text, nullable=True) + channel_id = Column(BigInteger, ForeignKey('discord_channels.channel_id'), nullable=True) # Assumes 'DiscordChannels' model + + channel = relationship('DiscordChannels', back_populates='products') + + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'name': self.name, + 'description': self.description, + 'wiki_url': self.wiki_url, + 'channel_id': self.channel_id + } + +class RoleMaster(Base): + __tablename__ = 'role_master' + + id = Column(BigInteger, primary_key=True) # Auto field + created_at = Column(DateTime, nullable=False) + updated_at = Column(DateTime, nullable=True) + role = Column(Text, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'created_at': self.created_at, + 'updated_at': self.updated_at, + 'role': self.role + } + +class TicketComments(Base): + __tablename__ = 'ticket_comments' + + id = Column(BigInteger, primary_key=True) + url = Column(Text, nullable=True) + html_url = Column(Text, nullable=True) + issue_url = Column(Text, nullable=True) + node_id = Column(Text, nullable=True) + comment_id = Column(BigInteger, nullable=True) + issue_id = Column(BigInteger, nullable=True) + commented_by = Column(Text, nullable=True) + commented_by_id = Column(BigInteger, nullable=True) + created_at = Column(DateTime, nullable=True) + updated_at = Column(DateTime, nullable=True) + content = Column(Text, nullable=True) + reactions_url = Column(Text, nullable=True) + ticket_url = Column(Text, nullable=False) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'url': self.url, + 'html_url': self.html_url, + 'issue_url': self.issue_url, + 'node_id': self.node_id, + 'commented_by': self.commented_by, + 'commented_by_id': self.commented_by_id, + 'created_at': self.created_at, + 'updated_at': self.updated_at, + 'content': self.content, + 'reactions_url': self.reactions_url, + 'ticket_url': self.ticket_url + } + +class UnlistedTickets(Base): + __tablename__ = 'unlisted_tickets' + + created_at = Column(DateTime, nullable=True) + name = Column(Text, nullable=True) + product = Column(Text, nullable=True) + complexity = Column(Text, nullable=True) + project_category = Column(Text, nullable=True) + project_sub_category = Column(Text, nullable=True) + reqd_skills = Column(Text, nullable=True) + issue_id = Column(BigInteger, unique=True, nullable=False) + api_endpoint_url = Column(Text, unique=True, nullable=True) + url = Column(Text, unique=True, nullable=True) + ticket_points = Column(SmallInteger, nullable=True) + index = Column(SmallInteger, unique=True, nullable=False) + mentors = 
Column(Text, nullable=True) + uuid = Column(String(36), primary_key=True) # UUID field + status = Column(Text, nullable=True) + organization = Column(Text, nullable=True) + + __table_args__ = (UniqueConstraint('uuid', 'issue_id'),) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'created_at': self.created_at, + 'name': self.name, + 'product': self.product, + 'complexity': self.complexity, + 'project_category': self.project_category, + 'project_sub_category': self.project_sub_category, + 'reqd_skills': self.reqd_skills, + 'issue_id': self.issue_id, + 'api_endpoint_url': self.api_endpoint_url, + 'url': self.url, + 'ticket_points': self.ticket_points, + 'index': self.index, + 'mentors': self.mentors, + 'uuid': self.uuid, + 'status': self.status, + 'organization': self.organization + } + +class UnstructuredDiscordData(Base): + __tablename__ = 'unstructured_discord_data' + + text = Column(Text, nullable=True) + author = Column(BigInteger, nullable=True) + channel = Column(BigInteger, nullable=True) + channel_name = Column(Text, nullable=True) + uuid = Column(String(36), primary_key=True) # UUID field + author_name = Column(Text, nullable=True) + author_roles = Column(Text, nullable=True) + sent_at = Column(Text, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'text': self.text, + 'author': self.author, + 'channel': self.channel, + 'channel_name': self.channel_name, + 'uuid': self.uuid, + 'author_name': self.author_name, + 'author_roles': self.author_roles, + 'sent_at': self.sent_at + } + +class UserActivity(Base): + __tablename__ = 'user_activity' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + contributor_id = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=False) # Assumes 'ContributorsRegistration' model + issue_id = Column(BigInteger, ForeignKey('issues.id'), nullable=False) # Assumes 'Issues' model + activity = Column(Text, nullable=True) + created_at = Column(DateTime, nullable=True) + updated_at = Column(DateTime, nullable=True) + mentor_id = Column(BigInteger, ForeignKey('mentor_details.id'), nullable=True) # Assumes 'MentorDetails' model + + contributor = relationship('ContributorsRegistration', back_populates='user_activities') + issue = relationship('Issues', back_populates='user_activities') + mentor = relationship('MentorDetails', back_populates='user_activities') + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'contributor_id': self.contributor_id, + 'issue_id': self.issue_id, + 'activity': self.activity, + 'created_at': self.created_at, + 'updated_at': self.updated_at, + 'mentor_id': self.mentor_id + } + +class UserBadges(Base): + __tablename__ = 'user_badges' + id = Column(UUID(as_uuid=True), primary_key=True) + user_id = Column(BigInteger, ForeignKey('users.id'), nullable=False) # Assumes 'Users' model + badge_id = Column(BigInteger, ForeignKey('badges.id'), nullable=False) # Assumes 'Badges' model + created_at = Column(DateTime, nullable=True) + updated_at = Column(DateTime, nullable=True) + + user = relationship('Users', back_populates='user_badges') + badge = relationship('Badges', back_populates='user_badges') + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'user_id': self.user_id, + 'badge_id': self.badge_id, + 'created_at': self.created_at, + 'updated_at': self.updated_at + } + +class UserCertificates(Base): + __tablename__ = 'user_certificates' + id = Column(UUID(as_uuid=True), primary_key=True) + user_id = 
Column(BigInteger, ForeignKey('users.id'), nullable=False) # Assumes 'Users' model + certificate_link = Column(Text, nullable=True) + created_at = Column(DateTime, nullable=True) + updated_at = Column(DateTime, nullable=True) + + user = relationship('Users', back_populates='user_certificates') + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'user_id': self.user_id, + 'certificate_link': self.certificate_link, + 'created_at': self.created_at, + 'updated_at': self.updated_at + } + + + +### + +class UserPointsMapping(Base): + __tablename__ = 'user_points_mapping' + id = Column(UUID(as_uuid=True), primary_key=True) + contributor = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=True) # Assumes 'ContributorsRegistration' model + points = Column(Integer, nullable=False) + level = Column(String(50), nullable=True) + created_at = Column(DateTime, default=func.now(), nullable=False) # Set to current time when created + updated_at = Column(DateTime, default=func.now(), onupdate=func.now(), nullable=False) + mentor_id = Column(BigInteger, ForeignKey('mentor_details.id'), nullable=True) # Assumes 'MentorDetails' model + + contributors = relationship('ContributorsRegistration', back_populates='user_points_mappings') + mentor = relationship('MentorDetails', back_populates='user_points_mappings') + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'contributor_id': self.contributor, + 'points': self.points, + 'level': self.level, + 'created_at': self.created_at, + 'updated_at': self.updated_at, + 'mentor_id': self.mentor_id + } + +class Users(Base): + __tablename__ = 'users' + + id = Column(BigInteger, primary_key=True) # Assumes id is the primary key + name = Column(Text, nullable=True) + discord = Column(Text, unique=True, nullable=True) + github = Column(Text, nullable=True) + points = Column(Integer, nullable=True) + level = Column(Text, nullable=True) + created_at = Column(DateTime, nullable=True) + updated_at = Column(DateTime, nullable=True) + + user_badges = relationship('UserBadges', back_populates='user') + user_certificates = relationship('UserCertificates', back_populates='user') + + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'name': self.name, + 'discord': self.discord, + 'github': self.github, + 'points': self.points, + 'level': self.level, + 'created_at': self.created_at, + 'updated_at': self.updated_at + } + +class VcLogs(Base): + __tablename__ = 'vc_logs' + + id = Column(BigInteger, primary_key=True) # Auto field + created_at = Column(DateTime, nullable=False) + discord_id = Column(BigInteger, nullable=True) + discord_name = Column(Text, nullable=True) + option = Column(Text, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'created_at': self.created_at, + 'discord_id': self.discord_id, + 'discord_name': self.discord_name, + 'option': self.option + } + +class GitHubProfileData(Base): + __tablename__ = 'github_profile_data' + + github_username = Column(String, primary_key=True) + discord_id = Column(BigInteger, nullable=False) + classroom_points = Column(Integer, nullable=False, default=0) + prs_raised = Column(Integer, nullable=False, default=0) + prs_reviewed = Column(Integer, nullable=False, default=0) + prs_merged = Column(Integer, nullable=False, default=0) + dpg_points = Column(Integer, nullable=False, default=0) + milestone = Column(Integer, nullable=False, default=0) + + def __repr__(self): + return f"" + + def 
to_dict(self): + return { + 'github_username': self.github_username, + 'discord_id': self.discord_id, + 'classroom_points': self.classroom_points, + 'prs_raised': self.prs_raised, + 'prs_reviewed': self.prs_reviewed, + 'prs_merged': self.prs_merged, + 'dpg_points': self.dpg_points, + 'milestone': self.milestone, + } + +class CommunityOrgs(Base): + __tablename__ = 'community_orgs' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + name = Column(Text, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'name': self.name + } + + + +class ContributorPoints(Base): + __tablename__ = 'contributor_points' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + contributors_id = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=True) + total_points = Column(Integer, nullable=False, default=0) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'contributors_id': self.contributors_id, + 'total_points': self.total_points + } + +class MentorNotAdded(Base): + __tablename__ = 'mentor_not_added' + + id = Column(BigInteger, primary_key=True, autoincrement=True) + mentor_github_id = Column(BigInteger, nullable=True) + issue_id = Column(BigInteger, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'id': self.id, + 'mentor_github_id': self.mentor_github_id, + 'issue_id': self.issue_id + } + + + +class Leaderboard(Base): + __tablename__ = 'leaderboard' + + discord_id = Column(BigInteger, primary_key=True, autoincrement=False) + github_id = Column(BigInteger, nullable=False) + github_url = Column(Text, nullable=False) + apprentice_badge = Column(Boolean, nullable=True) + converser_badge = Column(Boolean, nullable=False, default=False) + rockstar_badge = Column(Boolean, nullable=False, default=False) + enthusiast_badge = Column(Boolean, nullable=False, default=False) + rising_star_badge = Column(Boolean, nullable=False, default=False) + github_x_discord_badge = Column(Boolean, nullable=False, default=False) + points = Column(Integer, nullable=False, default=0) + bronze_badge = Column(Boolean, nullable=False, default=False) + silver_badge = Column(Boolean, nullable=False, default=False) + gold_badge = Column(Boolean, nullable=False, default=False) + ruby_badge = Column(Boolean, nullable=False, default=False) + diamond_badge = Column(Boolean, nullable=False, default=False) + certificate_link = Column(Text, nullable=True) + + def __repr__(self): + return f"" + + def to_dict(self): + return { + 'discord_id': self.discord_id, + 'github_id': self.github_id, + 'github_url': self.github_url, + 'apprentice_badge': self.apprentice_badge, + 'converser_badge': self.converser_badge, + 'rockstar_badge': self.rockstar_badge, + 'enthusiast_badge': self.enthusiast_badge, + 'rising_star_badge': self.rising_star_badge, + 'github_x_discord_badge': self.github_x_discord_badge, + 'points': self.points, + 'bronze_badge': self.bronze_badge, + 'silver_badge': self.silver_badge, + 'gold_badge': self.gold_badge, + 'ruby_badge': self.ruby_badge, + 'diamond_badge': self.diamond_badge, + 'certificate_link': self.certificate_link + } \ No newline at end of file diff --git a/db/server.py b/db/server.py index e69de29..f098c7f 100644 --- a/db/server.py +++ b/db/server.py @@ -0,0 +1,938 @@ +import dotenv +import os +## +from sqlalchemy.future import select +from sqlalchemy.orm import sessionmaker, aliased +from sqlalchemy.ext.asyncio import create_async_engine, 
AsyncSession
+from sqlalchemy.pool import NullPool
+from sqlalchemy.ext.declarative import DeclarativeMeta
+from sqlalchemy import select, delete, insert, update, join, asc, desc, cast, String, and_
+from sqlalchemy.exc import IntegrityError
+from sqlalchemy.sql import exists
+from sqlalchemy.dialects.postgresql import ARRAY, insert as pg_insert
+from datetime import datetime
+from models import (Base, ContributorsRegistration, GithubClassroomData, IssueContributors,
+                    Issues, CommunityOrgs, PointSystem, PrHistory)
+
+dotenv.load_dotenv(".env")
+
+
+def get_postgres_uri():
+    DB_HOST = os.getenv('POSTGRES_DB_HOST')
+    DB_NAME = os.getenv('POSTGRES_DB_NAME')
+    DB_USER = os.getenv('POSTGRES_DB_USER')
+    DB_PASS = os.getenv('POSTGRES_DB_PASS')
+
+    return f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}'
+
+
+class ServerQueries:
+
+    def __init__(self):
+        # Initialize the async SQLAlchemy engine and session factory
+        DATABASE_URL = get_postgres_uri()
+        engine = create_async_engine(DATABASE_URL, echo=False, poolclass=NullPool)
+        self.session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession)
+
+    def convert_dict(self, data):
+        # Normalise a single ORM object or a list of them into a list of dicts
+        try:
+            if isinstance(data, list):
+                return [val.to_dict() for val in data]
+            return [data.to_dict()]
+        except Exception as e:
+            print(e)
+            raise
+
+    def get_class_by_tablename(self, tablename):
+        try:
+            for cls in Base.registry._class_registry.values():
+                if isinstance(cls, DeclarativeMeta):
+                    if hasattr(cls, '__tablename__') and cls.__tablename__ == tablename:
+                        return cls
+            return None
+        except Exception as e:
+            print(f"ERROR get_class_by_tablename - {e}")
+            return None
+
+    async def readAll(self, table_class):
+        try:
+            table = self.get_class_by_tablename(table_class)
+            # Query all records from the specified table class
+            async with self.session() as session:
+                stmt = select(table)
+                result = await session.execute(stmt)
+
+                data = result.scalars().all()
+                return self.convert_dict(data)
+        except Exception as e:
+            print(f"An error occurred - readAll: {e}")
+            return None
+
+    async def deleteComment(self, issue_id, table_name):
+        try:
+            table = self.get_class_by_tablename(table_name)
+            async with self.session() as session:
+                stmt = delete(table).where(table.issue_id == issue_id)
+                await session.execute(stmt)
+                await session.commit()
+
+            return True
+
+        except Exception as e:
+            print(f"An error occurred - deleteComment: {e}")
+            return False
+
+    async def read(self, table, filters=None, select_columns=None, order=None, limit=None, offset=None):
+        """
+        Reads data from a table in the database using SQLAlchemy ORM.
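+
+        filters maps a column name to a value for equality, or to a tuple
+        (op, value) with op in {'gt', 'lt', 'gte', 'lte'}; order maps a column
+        name to 'asc' or 'desc'. A hypothetical call (argument values are
+        illustrative only):
+            await db.read('issues', filters={'status': 'open'},
+                          order={'updated_at': 'desc'}, limit=5)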
+ """ + try: + table_class = self.get_class_by_tablename(table) + + # Select specific columns or all columns if None + if select_columns: + stmt = select([getattr(table_class, col) for col in select_columns]) + else: + stmt = select(table_class) + + # Apply filters + if filters: + for column, condition in filters.items(): + if isinstance(condition, tuple) and len(condition) == 2: + operation, value = condition + col_attr = getattr(table_class, column) + if operation == 'gt': + stmt = stmt.where(col_attr > value) + elif operation == 'lt': + stmt = stmt.where(col_attr < value) + elif operation == 'gte': + stmt = stmt.where(col_attr >= value) + elif operation == 'lte': + stmt = stmt.where(col_attr <= value) + else: + stmt = stmt.where(getattr(table_class, column) == condition) + + # Apply ordering + if order: + for column, direction in order.items(): + if direction == 'asc': + stmt = stmt.order_by(asc(getattr(table_class, column))) + elif direction == 'desc': + stmt = stmt.order_by(desc(getattr(table_class, column))) + + # Apply limit + if limit: + stmt = stmt.limit(limit) + + # Apply offset + if offset: + stmt = stmt.offset(offset) + + async with self.session() as session: + result = await session.execute(stmt) + data = result.scalars().all() + + # Convert result to dictionary + return [row.to_dict() for row in data] + + except Exception as e: + print(f"An error occurred - read: {e}") + return None + + + async def add_discord_metrics(self, discord_metrics): + try: + async with self.session() as session: + DiscordMetrics = self.get_class_by_tablename("discord_metrics") + + for metric in discord_metrics: + stmt = select(DiscordMetrics).where(DiscordMetrics.product_name == metric["product_name"]) + result = await session.execute(stmt) + existing_record = result.scalars().first() + + if existing_record: + update_stmt = ( + update(DiscordMetrics) + .where(DiscordMetrics.product_name == metric["product_name"]) + .values( + mentor_messages=metric["mentor_messages"], + contributor_messages=metric["contributor_messages"] + ) + .returning(DiscordMetrics) + ) + updated_data = await session.execute(update_stmt) + data = updated_data.scalars().first() + else: + new_record = DiscordMetrics(**metric) + session.add(new_record) + await session.commit() + await session.refresh(new_record) + data = new_record + + await session.commit() + return data + + except IntegrityError as e: + print(f"An error occurred: {e}") + await session.rollback() + return None + + async def add_github_metrics(self, github_metrics): + try: + async with self.session() as session: + for metric in github_metrics: + GithubMetrics = self.get_class_by_tablename("github_metrics") + + # Check if the metric already exists in the database + stmt = select(GithubMetrics).where(GithubMetrics.product_name == metric["product_name"]) + result = await session.execute(stmt) + existing_record = result.scalars().first() + + if existing_record: + update_data = {key: value for key, value in metric.items() if key != "product_name"} + + update_stmt = ( + update(GithubMetrics) + .where(GithubMetrics.product_name == metric["product_name"]) + .values(update_data) + .returning(GithubMetrics) + ) + updated_data = await session.execute(update_stmt) + data = updated_data.scalars().first() + else: + # Insert the new metric if it doesn't exist + new_record = GithubMetrics(**metric) + session.add(new_record) + await session.commit() + await session.refresh(new_record) + data = new_record + + await session.commit() + return data + + except IntegrityError as e: + 
print(f"An error occurred: {e}") + await session.rollback() + return None + + async def check_exists(self,discord_id, assignment_id): + try: + # Construct the query for check exists + async with self.session() as session: + stmt = ( + select(exists() + .where((GithubClassroomData.discord_id.is_(None)) | (GithubClassroomData.discord_id == discord_id)) + .where(GithubClassroomData.assignment_id == assignment_id) + ) + ) + result = await session.execute(stmt) + exists_result = result.scalar() + + return exists_result + + except Exception as e: + print(f"An error occurred: {e}") + return None + + async def save_classroom_records(self, data): + try: + async with self.session() as session: + for record in data: + try: + new_record = GithubClassroomData( + **record) + session.add(new_record) + + await session.commit() + print("Record inserting successfully!") + except Exception as e: + await session.rollback() + print("Error updating record:", e) + + return True + except Exception as e: + print(f"An error occurred save_classroom_records: {e}") + return False + + async def update_classroom_records(self, data): + async with self.session() as session: + for record in data: + try: + stmt = ( + update(GithubClassroomData). + where( + GithubClassroomData.assignment_id == record.get('assignment_id'), + GithubClassroomData.discord_id == cast(str(record.get('discord_id')),String) + ). + values( + assignment_name=record.get('assignment', {}).get('title'), + assignment_url=record.get('assignment', {}).get('classroom', {}).get('url'), + c4gt_points=record.get('c4gt_points'), + github_username=record.get('students', [{}])[0].get('login'), + points_available=record.get('points_available'), + points_awarded=record.get('points_awarded',0), + roster_identifier=record.get('roster_identifier',""), + starter_code_url=record.get('starter_code_url', record.get('repository', {}).get('html_url')), + student_repository_name=record.get('repository', {}).get('full_name'), + student_repository_url=record.get('repository', {}).get('html_url'), + submission_timestamp=record.get('submission_timestamsp', datetime.now()), + updated_at=record.get('updated_at') + ) + ) + result = await session.execute(stmt) + await session.commit() + print("Record updated successfully!") + return True + except Exception as e: + await session.rollback() + print("Error updating record:", e) + return False + + async def getdiscord_from_cr(self,github_url): + try: + Table = self.get_class_by_tablename("contributors_registration") + async with self.session() as session: + stmt = (select(Table.discord_id).where(Table.github_url == github_url)) + result = await session.execute(stmt) + exists_result = result.scalar() + + return exists_result + except Exception as e: + print("Error - getdiscord_from_cr:", e) + return None + + + async def add_data(self, data: dict, table_name: str): + try: + table_class = self.get_class_by_tablename(table_name) + if not table_class: + raise ValueError(f"Table class for {table_name} not found") + + async with self.session() as session: + new_record = table_class(**data) + session.add(new_record) + await session.commit() + await session.refresh(new_record) + + return new_record + except Exception as e: + print("Error - add_data:", e) + return None + + async def insert_org(self, name): + try: + async with self.session() as session: + table = self.get_class_by_tablename("community_orgs") + if not table: + raise ValueError(f"No ORM class found for table community_orgs") + + stmt = insert(table).values( + name=name + 
+                ).returning(table)
+
+                result = await session.execute(stmt)
+
+                await session.commit()
+                inserted_record = result.fetchone()
+                print("inserted_record ", {"id": inserted_record[0], "name": inserted_record[1]})
+                return {"id": inserted_record[0], "name": inserted_record[1]}
+
+        except Exception as e:
+            print(f"Error in insert_org method: {e}")
+            return None
+
+    async def check_record_exists(self, table_name, filter_column, filter_value):
+        try:
+            table_class = self.get_class_by_tablename(table_name)
+            if not table_class:
+                raise ValueError(f"No ORM class found for table '{table_name}'")
+
+            async with self.session() as session:
+                stmt = (
+                    select(table_class)
+                    .where(getattr(table_class, filter_column) == filter_value)
+                )
+                result = await session.execute(stmt)
+                return result.scalars().first() is not None
+        except Exception as e:
+            print(f"An error occurred - check_record_exists: {e}")
+            return False
+
+    async def delete(self, table_name, filter_column, filter_value):
+        try:
+            table = self.get_class_by_tablename(table_name)
+            async with self.session() as session:
+                stmt = delete(table).where(getattr(table, filter_column) == filter_value)
+                await session.execute(stmt)
+                await session.commit()
+                return True
+
+        except Exception as e:
+            print(f"An error occurred - delete: {e}")
+            return False
+
+    async def get_data(self, col_name, table_name, value, condition=None):
+        try:
+            Table = self.get_class_by_tablename(table_name)
+            async with self.session() as session:
+                stmt = select(Table).where(getattr(Table, col_name) == value)
+                # Execute the query
+                result = await session.execute(stmt)
+                exists_result = result.scalar()
+                if exists_result:
+                    return self.convert_dict(exists_result)
+                return None
+
+        except Exception as e:
+            print(f"An error occurred - get_data: {e}")
+            return None
+
+    async def checkIsTicket(self, issue_id):
+        try:
+            tables_to_check = ['issues']
+
+            async with self.session() as session:
+                data = []
+                for table_name in tables_to_check:
+                    table_class = self.get_class_by_tablename(table_name)
+                    if not table_class:
+                        continue
+                    stmt = select(table_class).where(getattr(table_class, 'issue_id') == issue_id)
+                    result = await session.execute(stmt)
+                    records = result.scalars().all()
+
+                    if records:
+                        data.extend(records)
+                # True if the issue was found in any of the checked tables
+                return len(data) > 0
+        except Exception as e:
+            print(f"An error occurred - checkIsTicket: {e}")
+            return False
+
+    async def record_created_ticket(self, data, table_name):
+        try:
+            async with self.session() as session:
+                # Dynamically get the ORM class for the table
+                table = self.get_class_by_tablename(table_name)
+
+                stmt = insert(table).values(
+                    link=data['link'],
+                    labels=cast(data['labels'], ARRAY(String)),  # Cast to ARRAY type
+                    complexity=data['complexity'],
+                    technology=data['technology'],
+                    status=data['status'],
+                    created_at=data['created_at'],
+                    updated_at=data['updated_at'],
+                    title=data['title'],
+                    domain=data['domain'],
+                    description=f"{data['description']}",
+                    org_id=data['org_id'],
+                    issue_id=data['issue_id'],
+                    project_type=data['project_type']
+                ).returning(table)
+
+                result = await session.execute(stmt)
+
+                await session.commit()
+
+                return result
+
+        except Exception as e:
+ print(f"Error in record_created_ticket method: {e}") + return None + + + async def record_updated_ticket(self, data, table_name): + try: + async with self.session() as session: + # Dynamically get the ORM class for the table + table = self.get_class_by_tablename(table_name) + + # Build the update query + stmt = ( + update(table) + .where(table.issue_id == data['issue_id']) # Match the existing issue by issue_id + .values( + link=data['link'], + labels=cast(data['labels'], ARRAY(String)), # Cast to ARRAY type + complexity=data['complexity'], + technology=data['technology'], + status=data['status'], + created_at=data['created_at'], + updated_at=data['updated_at'], + title=data['title'], + description=f"{data['description']}", + org_id=data['org_id'] + ) + .returning(table) # Return the updated row(s) + ) + + # Execute the update statement + result = await session.execute(stmt) + + # Commit the transaction + await session.commit() + + return result + except Exception as e: + print(f"Error in record_updated_ticket method: {e}") + return None + + + async def update_data(self, data, col_name, table_name): + try: + table_class = self.get_class_by_tablename(table_name) + + async with self.session() as session: + stmt = ( + update(table_class) + .where(getattr(table_class, col_name) == data[col_name]) + .values(**data) + .returning(table_class) + ) + + result = await session.execute(stmt) + await session.commit() + + updated_record = result.scalars().first() + # Convert the updated record to a dictionary before returning + return self.convert_dict(updated_record) if updated_record else None + + except Exception as e: + print(f"Error in update_data: {e}") + return None + + + async def update_pr_data(self, data, table_name): + try: + table_class = self.get_class_by_tablename(table_name) + + async with self.session() as session: + new_pr_history = PrHistory( + created_at= data['created_at'], + api_url=data['api_url'], + html_url= data['html_url'], + raised_by= data['raised_by'], + raised_at= data['raised_at'], + raised_by_username= data['raised_by_username'], + status= data['status'], + is_merged= data['is_merged'], + merged_by= data['merged_by'], + merged_at= data['merged_at'], + merged_by_username= data['merged_by_username'], + pr_id= data['pr_id'] + ) + stmt = ( + update(table_class) + .where(table_class.pr_id == data['pr_id']) # Match the existing issue by issue_id + .values( + + ) + .returning(table_class) # Return the updated row(s) + ) + + # Execute the update statement + result = await session.execute(stmt) + + # Commit the transaction + await session.commit() + + # Optionally fetch the updated record(s) + updated_record = result.fetchone() + + return updated_record if updated_record else None + + except Exception as e: + print(f"Error in update_data: {e}") + return None + + + async def update_pr_history(self, pr_id, data): + try: + async with self.session() as session: + # Query for the existing record based on pr_id (or some unique identifier) + stmt = select(PrHistory).where(PrHistory.pr_id == pr_id) + result = await session.execute(stmt) + pr_history_record = result.scalars().first() + + if pr_history_record: + # Update the fields with new values from data + pr_history_record.created_at = data['created_at'] + pr_history_record.api_url = data['api_url'] + pr_history_record.html_url = data['html_url'] + pr_history_record.raised_by = data['raised_by'] + pr_history_record.raised_at = data['raised_at'] + pr_history_record.raised_by_username = data['raised_by_username'] + 
+                    pr_history_record.status = data['status']
+                    pr_history_record.is_merged = data['is_merged']
+                    pr_history_record.merged_by = data['merged_by']
+                    pr_history_record.merged_at = data['merged_at']  # None while the PR is still open
+                    pr_history_record.merged_by_username = data['merged_by_username']
+                    pr_history_record.ticket_url = data['ticket_url']
+                    pr_history_record.ticket_complexity = data['ticket_complexity']
+
+                    # Commit the changes to the database
+                    await session.commit()
+
+                    # Optionally refresh the object
+                    await session.refresh(pr_history_record)
+
+                    return pr_history_record
+                else:
+                    print(f"Record with pr_id {pr_id} not found")
+                    return None
+
+        except Exception as e:
+            print(f"Error in update_pr_history: {e}")
+            return None
+
+    async def addPr(self, prData, issue_id):
+        try:
+            ticket = None
+            if issue_id:
+                ticket = await self.get_data("issue_id", "issues", issue_id, None)
+                if not ticket:
+                    # Fall back to dmp_tickets; get_data may also return None
+                    ticket = await self.get_data("issue_id", "dmp_tickets", issue_id, None)
+
+            for pr in prData:
+                data = {
+                    "html_url": pr["html_url"],
+                    "pr_id": pr["pr_id"],
+                    "raised_by": pr["raised_by"],
+                    "raised_at": pr["raised_at"],
+                    "raised_by_username": pr["raised_by_username"],
+                    "status": pr["status"],
+                    "is_merged": pr["is_merged"] if pr.get("is_merged") else None,
+                    "merged_by": pr["merged_by"] if pr["merged_by"] else None,
+                    "merged_by_username": pr["merged_by_username"] if pr.get("merged_by_username") else None,
+                    "merged_at": pr["merged_at"] if pr.get("merged_at") else None,
+                    "points": ticket[0]["ticket_points"] if ticket else 0,
+                    "ticket_url": ticket[0]["api_endpoint_url"] if ticket else 0
+                }
+                await self.add_data(data, "connected_prs")
+
+            return True
+        except Exception as e:
+            print(f"Error in addPr: {e}")
+            return None
+
+    async def get_issue_from_issue_id(self, issue_id):
+        try:
+            async with self.session() as session:
+                # Dynamically get the ORM class for the table
+                table = self.get_class_by_tablename("issues")
+
+                # Fetch the issue row matching this issue_id
+                stmt = select(table).where(table.issue_id == issue_id)
+                result = await session.execute(stmt)
+                issues = result.scalars().first()
+
+                if issues:
+                    return self.convert_dict(issues)
+                return None
+
+        except Exception as e:
+            print(f"Error in get_issue_from_issue_id method: {e}")
+            return None
+
+    async def get_contributors_from_issue_id(self, issue_id):
+        try:
+            async with self.session() as session:
+                # Dynamically get the ORM class for the table
+                table = self.get_class_by_tablename("issue_contributors")
+
+                # Fetch every contributor row linked to this issue_id
+                stmt = select(table).where(table.issue_id == issue_id)
+                result = await session.execute(stmt)
+                issues = result.scalars().all()
+
+                if issues:
+                    return self.convert_dict(issues)
+                return None
+
+        except Exception as e:
+            print(f"Error in get_contributors_from_issue_id method: {e}")
+            return None
+
+    async def get_pointsby_complexity(self, complexity_type, type="Contributor"):
+        try:
+            async with self.session() as session:
+                # Dynamically get the ORM class for the table
+                table = self.get_class_by_tablename("points_mapping")
+
+                # Build and execute the query with multiple conditions
+                stmt = select(table).where(
+                    and_(
+                        table.complexity == complexity_type,
+                        table.role == type
+                    )
+                )
+                result = await session.execute(stmt)
+                points = result.scalars().all()
+                return points[0].points if points else 0
+
+        except Exception as e:
+            print(f"Error in get_pointsby_complexity method: {e}")
+            return None
+
+    async def upsert_point_transaction(self, issue_id, user_id, points, user_type="Contributor"):
+        try:
+            async with self.session() as session:
+                table = self.get_class_by_tablename("point_transactions")
+                column_map = {
+                    "Contributor": table.user_id,
+                    "Mentor": table.mentor_id,
+                }
+                chosen_column = column_map.get(user_type)
+                stmt = select(table).where(
+                    and_(
+                        table.issue_id == issue_id,
+                        chosen_column == user_id
+                    )
+                )
+
+                result = await session.execute(stmt)
+                transaction = result.scalars().one_or_none()
+
+                if transaction:
+                    # Record exists, so update the points column
+                    update_stmt = (
+                        update(table)
+                        .where(and_(table.issue_id == issue_id, chosen_column == user_id))
+                        .values(point=points)
+                    )
+                    await session.execute(update_stmt)
+                    await session.commit()
+                    return True
+
+                else:
+                    # Record does not exist, so create a new one
+                    new_transaction = table(issue_id=issue_id, point=points)
+                    setattr(new_transaction, chosen_column.key, user_id)
+                    session.add(new_transaction)
+                    await session.commit()
+                    return True
+
+        except Exception as e:
+            print(f"Error in upsert_point_transaction method: {e}")
+            return None
+
+    async def save_user_points(self, user_id, points, user_type="Contributor"):
+        try:
+            async with self.session() as session:
+                table = self.get_class_by_tablename("user_points_mapping")
+                column_map = {
+                    "Contributor": table.contributor,
+                    "Mentor": table.mentor_id,
+                }
+                chosen_column = column_map.get(user_type)
+                stmt = select(table).where(chosen_column == user_id)
+
+                result = await session.execute(stmt)
+                transaction = result.scalars().one_or_none()
+
+                if transaction:
+                    # Record exists, so add the new points to the running total
+                    addon_points = points + transaction.points
+                    update_stmt = (
+                        update(table)
+                        .where(chosen_column == user_id)
+                        .values(points=addon_points)
+                    )
+                    await session.execute(update_stmt)
+                    await session.commit()
+                    return True
+
+                else:
+                    # Record does not exist, so create a new one
+                    new_transaction = table(points=points)
+                    setattr(new_transaction, chosen_column.key, user_id)
+                    session.add(new_transaction)
+                    await session.commit()
+                    return True
+
+        except Exception as e:
+            print(f"Error in save_user_points method: {e}")
+            return None
+
+
+    async def deleteIssueComment(self, commentId):
+        try:
+            async with self.session() as session:
+                # Dynamically get the ORM class for the table
+                table = self.get_class_by_tablename("ticket_comments")
+
+                # Build and execute the delete statement for the given comment id
+                stmt = delete(table).where(
+                    getattr(table, "id") == commentId
+                )
+                result = await session.execute(stmt)
+                await session.commit()
+                # rowcount reflects how many comment rows were deleted
+                return result.rowcount
+        except Exception as e:
+            print(f"Error in deleting issue comments: {e}")
+            return None
+
+
+    async def getUserLeaderBoardData(self):
+        try:
+            async with self.session() as session:
+                orgs_alias = aliased(CommunityOrgs)
+                points_alias = aliased(PointSystem)
+
+                # Join the Issues table with the CommunityOrgs and PointSystem
+                stmt = (
+                    select(Issues, orgs_alias, points_alias)
+                    .join(orgs_alias, Issues.org_id == orgs_alias.id, isouter=True)  # Left join with CommunityOrgs
+                    .join(points_alias, Issues.complexity == points_alias.complexity, isouter=True)  # Left join with PointSystem
+                )
+
+                # Execute the statement
+                result = await session.execute(stmt)
+
+                # Fetch all the results
+                records = result.all()
+
+                # Convert to dictionary format for readability (if needed)
+                return [
+                    {
+                        'issue': issue.to_dict(),
+                        'community_org': org.to_dict() if org else None,
+                        'point_system': points.to_dict() if points else None
+                    }
+                    for issue, org, points in records
+                ]
+        except Exception as e:
+            print('Exception occurred while getting users leaderboard data ', e)
+            return None
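A hedged sketch of how the point helpers above compose; the ids are made up, and the flow (look up points by complexity, record the per-issue transaction, then bump the running total) is an assumed usage rather than code from this patch:

    # Hypothetical award flow, inside an async function; `db` is an instance
    # of this interface class.
    points = await db.get_pointsby_complexity("Medium", "Contributor")
    if points:
        await db.upsert_point_transaction(issue_id=42, user_id=7, points=points)
        await db.save_user_points(user_id=7, points=points)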
+    async def get_joined_data_with_filters(self, filters=None):
+        async with self.session() as session:
+            # Aliases for the tables
+            issues = aliased(Issues)
+            orgs = aliased(CommunityOrgs)
+            points = aliased(PointSystem)
+
+            # Base query with the joins
+            query = select(
+                issues,
+                orgs,
+                points
+            ).join(
+                orgs, issues.org_id == orgs.id
+            ).join(
+                points, points.complexity == issues.complexity
+            )
+
+            # If dynamic filters are provided, apply them
+            if filters:
+                filter_conditions = []
+                for field, value in filters.items():
+                    filter_conditions.append(getattr(issues, field) == value)
+
+                query = query.where(and_(*filter_conditions))
+
+            # Execute the query and return the results
+            result = await session.execute(query)
+            records = result.all()
+
+            # Convert results to dictionaries if necessary
+            return [dict(issue=record[0].to_dict(), org=record[1].to_dict(), points=record[2].to_dict()) for record in records]
+
+    async def fetch_filtered_issues(self, filters):
+        try:
+            async with self.session() as session:
+                # Start building the query by joining tables
+                query = (
+                    select(Issues, CommunityOrgs, PointSystem, IssueContributors, ContributorsRegistration)
+                    .join(CommunityOrgs, Issues.org_id == CommunityOrgs.id)
+                    .join(PointSystem, Issues.complexity == PointSystem.complexity)
+                    .outerjoin(IssueContributors, Issues.id == IssueContributors.issue_id)
+                    .outerjoin(ContributorsRegistration, IssueContributors.contributor_id == ContributorsRegistration.id)
+                    .where(Issues.complexity != 'Beginner')
+                    .order_by(desc(Issues.id))
+                )
+
+                # Prepare dynamic filter conditions
+                conditions = []
+
+                # Check if there are filters for the Issues table
+                if 'issues' in filters:
+                    for field, value in filters['issues'].items():
+                        conditions.append(getattr(Issues, field) == value)
+
+                # Check if there are filters for the CommunityOrgs table
+                if 'org' in filters:
+                    for field, value in filters['org'].items():
+                        conditions.append(getattr(CommunityOrgs, field) == value)
+
+                # Check if there are filters for the PointSystem table
+                if 'points' in filters:
+                    for field, value in filters['points'].items():
+                        conditions.append(getattr(PointSystem, field) == value)
+
+                # Apply filters (if any) to the query
+                if conditions:
+                    query = query.where(and_(*conditions))
+
+                # Execute the query and fetch results
+                result = await session.execute(query)
+                rows = result.fetchall()
+
+                # Process the result into a dictionary or a preferred format
+                data = []
+                for row in rows:
+                    issue = row.Issues.to_dict()
+                    org = row.CommunityOrgs.to_dict() if row.CommunityOrgs else None
+                    point_system = row.PointSystem.to_dict()
+                    contributors_registration = row.ContributorsRegistration.to_dict() if row.ContributorsRegistration else None
+                    data.append({
+                        'issue': issue,
+                        'org': org,
+                        'points': point_system,
+                        'contributors_registration': contributors_registration
+                    })
+
+                return data
+
+        except Exception as e:
+            print(f"Error in fetch_filtered_issues: {e}")
+            return None
+
+
+    def add_github_user(self, user):
+        data = self.client.table("contributors_registration").upsert(user, on_conflict=["github_id", "discord_id"]).execute()
+        return data.data
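A minimal, hypothetical call into the upsert helper above; the field values are placeholders, and the keys mirror the github_id/discord_id conflict target it declares:

    # Hypothetical: register (or re-register) a contributor; a conflicting
    # github_id/discord_id pair updates the existing row instead of failing.
    user = {"github_id": 583231, "discord_id": 111111111111111111, "name": "octocat"}
    rows = db.add_github_user(user)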
diff --git a/migrations/env.py b/migrations/env.py
index 36112a3..040306a 100644
--- a/migrations/env.py
+++ b/migrations/env.py
@@ -2,6 +2,7 @@
 from sqlalchemy import engine_from_config
 from sqlalchemy import pool
 
+from db.models import shared_metadata
 from alembic import context
 
 
@@ -9,6 +10,13 @@
 # access to the values within the .ini file in use.
 config = context.config
 
+from dotenv import load_dotenv
+import os
+
+load_dotenv()
+url = os.getenv("DATABASE_URL")
+config.set_main_option("sqlalchemy.url", url)
+
 # Interpret the config file for Python logging.
 # This line sets up loggers basically.
 if config.config_file_name is not None:
@@ -18,7 +26,7 @@
 # for 'autogenerate' support
 # from myapp import mymodel
 # target_metadata = mymodel.Base.metadata
-target_metadata = None
+target_metadata = shared_metadata
 
 # other values from the config, defined by the needs of env.py,
 # can be acquired:

diff --git a/requirements.txt b/requirements.txt
index 09654fd487b9ff1c6b983e3b0358619ab63fe849..7b9da297149e49c45bcaa38cb17cd7493e90cebf 100644
GIT binary patch
(base85 deltas omitted: 50 and 5 bytes)

diff --git a/sample.env b/sample.env
new file mode 100644
index 0000000..0e971de
--- /dev/null
+++ b/sample.env
@@ -0,0 +1,5 @@
+POSTGRES_DB_HOST=""
+POSTGRES_DB_NAME=""
+POSTGRES_DB_USER=""
+POSTGRES_DB_PASS=""
+DATABASE_URL=""
\ No newline at end of file

From 4ade940b5532bc97a8ba08b2bb9e78cce5106f23 Mon Sep 17 00:00:00 2001
From: jaanbaaz
Date: Fri, 6 Dec 2024 12:25:31 +0530
Subject: [PATCH 16/32] Added models, migrations, and database interactions

---
 db/__pycache__/__init__.cpython-310.pyc       | Bin 0 -> 1348 bytes
 db/__pycache__/models.cpython-310.pyc         | Bin 0 -> 44153 bytes
 db/models.py                                  | 137 ------------------
 migrations/__pycache__/env.cpython-310.pyc    | Bin 0 -> 1963 bytes
 ...b9b29e41_initial_migration.cpython-310.pyc | Bin 0 -> 913 bytes
 .../e7eab9b29e41_initial_migration.py         |  30 ++++
 6 files changed, 30 insertions(+), 137 deletions(-)
 create mode 100644 db/__pycache__/__init__.cpython-310.pyc
 create mode 100644 db/__pycache__/models.cpython-310.pyc
 create mode 100644 migrations/__pycache__/env.cpython-310.pyc
 create mode 100644 migrations/versions/__pycache__/e7eab9b29e41_initial_migration.cpython-310.pyc
 create mode 100644 migrations/versions/e7eab9b29e41_initial_migration.py

diff --git a/db/__pycache__/__init__.cpython-310.pyc b/db/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d65e3e5ffbebd8b74be591be79a3f64ee1e45234
GIT binary patch
(base85 literals omitted: db/__pycache__/__init__.cpython-310.pyc, 1348 bytes, and db/__pycache__/models.cpython-310.pyc, 44153 bytes)
diff --git a/db/models.py b/db/models.py
-    def to_dict(self):
-        return {
-            'id': self.id,
-            'created_at': self.created_at.isoformat(),
-            'name': self.name,
-            'description': self.description,
-            'link': self.link,
-            'repo_owner': self.repo_owner
-        }
-
-class DmpIssue(Base):
-    __tablename__ = 'dmp_issues'
-
-    id = Column(Integer, primary_key=True, autoincrement=True)
-    issue_url = Column(String, nullable=False)
-    issue_number = Column(Integer, nullable=False)
-    mentor_username = Column(String, nullable=True)
-    contributor_username = Column(String, nullable=True)
-    title = Column(String, nullable=False)
-    org_id = Column(Integer, ForeignKey('dmp_orgs.id'), nullable=False)
-    description = Column(Text, nullable=True)
-    repo_owner = Column(Text, nullable=True)
-    repo = Column(String, nullable=True)
-
-
-    # Relationship to Prupdates
-    pr_updates = relationship('Prupdates', backref='pr_details', lazy=True)
-
-    def __repr__(self):
-        return f""
-
-    def to_dict(self):
-        return {
-            'id': self.id,
-            'issue_url': self.issue_url,
-            'issue_number': self.issue_number,
-            'mentor_username': self.mentor_username,
-            'contributor_username': self.contributor_username,
-            'title': self.title,
-            'org_id': self.org_id,
-            'description': self.description,
-            'repo': self.repo,
-            'repo_owner': self.repo_owner
-        }
-
-class DmpIssueUpdate(Base):
-    __tablename__ = 'dmp_issue_updates'
-
-    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
-    body_text = Column(Text, nullable=False)
-    comment_link = Column(String, nullable=False)
-    comment_id = Column(BigInteger, primary_key=True, nullable=False)
-    comment_api = Column(String, nullable=False)
-    comment_updated_at = Column(DateTime, nullable=False)
-    dmp_id = Column(Integer, ForeignKey('dmp_orgs.id'), nullable=False)
-    created_by = Column(String, nullable=False)
-
-    def __repr__(self):
-        return f""
-
-    def to_dict(self):
-        return {
-            'created_at': self.created_at.isoformat(),
-            'body_text': self.body_text,
-            'comment_link': self.comment_link,
-            'comment_id': self.comment_id,
-            'comment_api': self.comment_api,
-            'comment_updated_at': self.comment_updated_at.isoformat(),
-            'dmp_id': self.dmp_id,
-            'created_by': self.created_by
-        }
-
-class Prupdates(Base):
-    __tablename__ = 'dmp_pr_updates'
-
-    created_at = Column(DateTime, nullable=False, default=datetime.utcnow)
-    pr_id = Column(Integer, nullable=False, primary_key=True)
-    status = Column(String, nullable=False)
-    title = Column(String, nullable=False)
-    pr_updated_at = Column(DateTime, nullable=False, default=datetime.utcnow)
-    merged_at = Column(DateTime)
-    closed_at = Column(DateTime)
-    dmp_id = Column(Integer, ForeignKey('dmp_issues.id'), nullable=False)  # ForeignKey relationship
-    link = Column(String, nullable=False)
-
-    def __repr__(self):
-        return f''
-
-    def to_dict(self):
-        return {
-            'created_at': self.created_at.isoformat(),
-            'pr_id': self.pr_id,
-            'status': self.status,
-            'title': self.title,
-            'pr_updated_at': self.pr_updated_at.isoformat(),
-            'merged_at': self.merged_at.isoformat() if self.merged_at else None,
-            'closed_at': self.closed_at.isoformat() if self.closed_at else None,
-            'dmp_id': self.dmp_id,
-            'link': self.link
-        }
-
-class DmpWeekUpdate(Base):
-    __tablename__ = 'dmp_week_updates'
-
-    id = Column(Integer, primary_key=True, autoincrement=True)
-    issue_url = Column(String, nullable=False)
-    week = Column(Integer, nullable=False)
-    total_task = Column(Integer, nullable=False)
-    completed_task = Column(Integer, nullable=False)
-    progress = Column(Integer, nullable=False)
-    task_data = Column(Text, nullable=False)
-    dmp_id = Column(Integer, nullable=False)
-
-    def __repr__(self):
-        return f""
-
-
 class DateTime(TypeDecorator):
     impl = SA_DateTime

diff --git a/migrations/__pycache__/env.cpython-310.pyc b/migrations/__pycache__/env.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6fb27f85f28e4210f412a115b7bebe4dc0613c13
GIT binary patch
(base85 literals omitted: env.cpython-310.pyc, 1963 bytes, and e7eab9b29e41_initial_migration.cpython-310.pyc, 913 bytes)
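Once the migration files below are in place, they can be applied programmatically as well as from the shell; a hedged sketch (assuming alembic is installed and an alembic.ini pointing at this migrations/ directory sits in the working directory), using alembic's documented Python entry point:

    # Equivalent to running `alembic upgrade head` from the shell; relies on
    # the DATABASE_URL lookup that migrations/env.py performs above.
    from alembic.config import main as alembic_main

    alembic_main(argv=["upgrade", "head"])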
diff --git a/migrations/versions/e7eab9b29e41_initial_migration.py b/migrations/versions/e7eab9b29e41_initial_migration.py
new file mode 100644
index 0000000..5002f49
--- /dev/null
+++ b/migrations/versions/e7eab9b29e41_initial_migration.py
@@ -0,0 +1,30 @@
+"""Initial migration
+
+Revision ID: e7eab9b29e41
+Revises:
+Create Date: 2024-12-06 12:21:44.174579
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision: str = 'e7eab9b29e41'
+down_revision: Union[str, None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    pass
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    pass
+    # ### end Alembic commands ###

From eb210974caa453c71479252512825e46fee65f1a Mon Sep 17 00:00:00 2001
From: jaanbaaz
Date: Mon, 9 Dec 2024 11:27:34 +0530
Subject: [PATCH 17/32] added migration files

---
 db/__pycache__/models.cpython-310.pyc         |  Bin 44153 -> 44176 bytes
 db/dmp_api.py                                 |   36 +-
 db/models.py                                  |    1 +
 ....py => 5709cf5c2772_baseline_migration.py} |   10 +-
 .../versions/947179aaa97c_version_added.py    | 1004 +++++++++++++++++
 ...5c2772_baseline_migration.cpython-310.pyc} |  Bin 913 -> 915 bytes
 ...947179aaa97c_version_added.cpython-310.pyc |  Bin 0 -> 32678 bytes
 7 files changed, 1025 insertions(+), 26 deletions(-)
 rename migrations/versions/{e7eab9b29e41_initial_migration.py => 5709cf5c2772_baseline_migration.py} (82%)
 create mode 100644 migrations/versions/947179aaa97c_version_added.py
 rename migrations/versions/__pycache__/{e7eab9b29e41_initial_migration.cpython-310.pyc => 5709cf5c2772_baseline_migration.cpython-310.pyc} (52%)
 create mode 100644 migrations/versions/__pycache__/947179aaa97c_version_added.cpython-310.pyc

diff --git a/db/__pycache__/models.cpython-310.pyc b/db/__pycache__/models.cpython-310.pyc
index af2b3ed398d6aaaab546aafe8d64a920bf78c82d..04005c38f8303e70f1f4bf20f5c6bfd9ca5663dc 100644
GIT binary patch
(base85 deltas omitted: 3920 and 3919 bytes)
diff --git a/migrations/versions/e7eab9b29e41_initial_migration.py b/migrations/versions/5709cf5c2772_baseline_migration.py
similarity index 82%
rename from migrations/versions/e7eab9b29e41_initial_migration.py
rename to migrations/versions/5709cf5c2772_baseline_migration.py

 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    pass
+    pass
     # ### end Alembic commands ###

diff --git a/migrations/versions/947179aaa97c_version_added.py b/migrations/versions/947179aaa97c_version_added.py
new file mode 100644
index 0000000..9858072
--- /dev/null
+++ b/migrations/versions/947179aaa97c_version_added.py
@@ -0,0 +1,1004 @@
+"""Version added
+
+Revision ID: 947179aaa97c
+Revises: 5709cf5c2772
+Create Date: 2024-12-06 12:46:09.193639
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision: str = '947179aaa97c' +down_revision: Union[str, None] = '5709cf5c2772' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('ccbp_tickets') + op.drop_table('pr_history') + op.drop_table('mentorship_program_website_commits') + op.drop_table('__community_program_tickets') + op.drop_table('chapters') + op.drop_table('__dev_onboarding') + op.drop_table('discord_engagement') + op.drop_table('__mentors') + op.drop_table('connected_prs') + op.drop_table('github_classroom_data') + op.drop_table('pr_staging') + op.drop_table('contributors_registration') + op.drop_table('dmp_orgs') + op.drop_table('user_activity') + op.drop_table('contributor_names') + op.drop_table('__mentorship_program_ticket_comments') + op.drop_table('vc_logs') + op.drop_table('issue_contributors') + op.drop_table('__contributors_discord') + op.drop_table('app_comments') + op.drop_table('user_certificates') + op.drop_table('contributors_registration_old') + op.drop_table('product') + op.drop_table('ticket_comments') + op.drop_table('github_installations') + op.drop_table('mentorship_website_contributor_project') + op.drop_table('points_mapping') + op.drop_table('community_orgs') + op.drop_table('__mentorship_program_selected_contributors') + op.drop_table('github_organisations_to_organisations') + op.drop_table('badges') + op.drop_table('dmp_tickets') + op.drop_table('__dashboard_config') + op.drop_table('user_badges') + op.drop_table('point_system') + op.drop_table('__community_program_unique_user_data') + op.drop_table('__community_program_product_wise_tickets') + op.drop_table('dmp_pr_updates') + op.drop_table('mentor_details') + op.drop_table('mentorship_program_website_pull_request') + op.drop_table('mentorship_program_site_structure') + op.drop_table('contributor_points') + op.drop_table('dmp_week_updates') + op.drop_table('dmp_issues') + op.drop_table('dmp_issue_updates') + op.drop_table('issue_mentors') + op.drop_table('unlisted_tickets') + op.drop_table('mentorship_program_website_has_updated') + op.drop_table('__contributors_vc') + op.drop_table('unstructured discord data') + op.drop_table('__mentorship_program_pull_request') + op.drop_table('__applicant') + op.drop_table('users') + op.drop_table('__mentorship_program_tickets') + op.drop_table('__comments') + op.drop_table('__pull_requests') + op.drop_table('__mentorship_program_projects') + op.drop_table('discord_channels') + op.drop_table('__community_organisations') + op.drop_table('point_transactions') + op.drop_table('issues') + op.drop_table('contributors_discord') + op.drop_table('mentorship_program_website_comments') + op.drop_table('mentor_not_added') + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('mentor_not_added', + sa.Column('id', sa.BIGINT(), sa.Identity(always=True, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), + sa.Column('mentor_github_id', sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column('issue_id', sa.BIGINT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint('id', name='mentor_not_added_pkey') + ) + op.create_table('mentorship_program_website_comments', + sa.Column('comment_id', sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('commented_by_username', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), + sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), + sa.Column('body', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint('comment_id', name='mentorship_program_website_comments_pkey') + ) + op.create_table('contributors_discord', + sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), + sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False), + sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('chapter', sa.TEXT(), autoincrement=False, nullable=True, comment="the chapter they're associated with"), + sa.Column('gender', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('is_active', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=False), + sa.Column('country', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('city', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('experience', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint('id', name='contributors_discord_duplicate_pkey'), + sa.UniqueConstraint('discord_id', name='contributors_discord_duplicate_discord_id_key') + ) + op.create_table('issues', + sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('issues_id_seq'::regclass)"), autoincrement=True, nullable=False), + sa.Column('link', sa.TEXT(), autoincrement=False, nullable=False), + sa.Column('labels', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), + sa.Column('complexity', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('skills', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('technology', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column('updated_at', postgresql.TIMESTAMP(), 
autoincrement=False, nullable=True), + sa.Column('title', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('domain', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('project_type', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('issue_id', sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column('org_id', sa.BIGINT(), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint(['org_id'], ['community_orgs.id'], name='issues_org_id_fkey', onupdate='CASCADE', ondelete='SET NULL'), + sa.PrimaryKeyConstraint('id', name='issues_pkey'), + sa.UniqueConstraint('issue_id', name='unique_issue_id'), + postgresql_ignore_search_path=False + ) + op.create_table('point_transactions', + sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column('issue_id', sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column('point', sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column('type', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), + sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), + sa.Column('angel_mentor_id', sa.INTEGER(), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint(['issue_id'], ['issues.id'], name='point_transactions_issue_id_fkey'), + sa.ForeignKeyConstraint(['user_id'], ['contributors_registration.id'], name='point_transactions_user_id_fkey', onupdate='CASCADE', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id', name='point_transactions_pkey') + ) + op.create_table('__community_organisations', + sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), + sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint('id', name='organisations_pkey'), + sa.UniqueConstraint('name', name='organisations_name_key'), + postgresql_ignore_search_path=False + ) + op.create_table('discord_channels', + sa.Column('channel_id', sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column('channel_name', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('webhook', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('should_notify', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False), + sa.PrimaryKeyConstraint('channel_id', name='discord_channels_pkey'), + postgresql_ignore_search_path=False + ) + op.create_table('__mentorship_program_projects', + sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), + sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('repository', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('issue_page_url', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('isssue_api_url', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('repository_api_url', sa.TEXT(), autoincrement=False, nullable=True), 
+ sa.ForeignKeyConstraint(['product'], ['product.name'], name='__mentorship_program_projects_product_fkey', ondelete='SET DEFAULT'), + sa.PrimaryKeyConstraint('id', name='projects_pkey'), + sa.UniqueConstraint('name', name='projects_name_key'), + comment='Selected projects under C4GT 2023', + postgresql_ignore_search_path=False + ) + op.create_table('__pull_requests', + sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), + sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True), + sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('raised_by', sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column('raised_at', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('is_merged', sa.BOOLEAN(), autoincrement=False, nullable=True), + sa.Column('merged_by', sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column('merged_at', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='github id of the pr'), + sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False), + sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint('id', name='pull_requests_pkey1'), + sa.UniqueConstraint('html_url', name='pull_requests_html_url_key'), + sa.UniqueConstraint('pr_id', name='pull_requests_pr_id_key') + ) + op.create_table('__comments', + sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), + sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), + sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint('id', name='comments_pkey') + ) + op.create_table('__mentorship_program_tickets', + sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column('raised_by_username', 
sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint('id', name='mentorship_program_tickets_pkey') + ) + op.create_table('users', + sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('users_id_seq'::regclass)"), autoincrement=True, nullable=False), + sa.Column('name', sa.TEXT(), autoincrement=False, nullable=False), + sa.Column('discord', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('github', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('points', sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column('level', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), + sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint('id', name='users_pkey'), + postgresql_ignore_search_path=False + ) + op.create_table('__applicant', + sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), + sa.Column('sheet_username', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint('id', name='applicant_pkey'), + sa.UniqueConstraint('discord_id', name='applicant_discord_id_key') + ) + op.create_table('__mentorship_program_pull_request', + sa.Column('pr_url', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('pr_id', sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column('pr_node_id', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('title', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('raised_by_id', sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column('body', sa.TEXT(), autoincrement=False, nullable=True), + sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column('merged_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column('assignees', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), + sa.Column('requested_reviewers', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), + sa.Column('labels', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), + sa.Column('review_comments_url', sa.TEXT(), autoincrement=False, nullable=True), + 
sa.Column('comments_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('repository_id', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('repository_owner_name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('repository_owner_id', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('repository_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('number_of_commits', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('lines_of_code_added', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('lines_of_code_removed', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('number_of_files_changed', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('merged_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('linked_ticket', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('pr_id', name='mentorship_program_pull_request_pkey')
+    )
+    op.create_table('unstructured discord data',
+    sa.Column('text', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('author', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('channel', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('channel_name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('uuid', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+    sa.Column('author_name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('author_roles', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('sent_at', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('uuid', name='unstructured discord data_duplicate_pkey'),
+    sa.UniqueConstraint('uuid', name='unstructured discord data_duplicate_uuid_key')
+    )
+    op.create_table('__contributors_vc',
+    sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('certificate_link', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('stats', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('github_username', name='contributors_vc_pkey')
+    )
+    op.create_table('mentorship_program_website_has_updated',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('project', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('week1_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('week2_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('week3_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('week4_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('week5_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('week6_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('week7_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('week8_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('week9_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('week1_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('week2_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('week3_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('week4_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('week5_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('week6_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('week7_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('week8_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('week9_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('project_folder', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('all_links', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['project'], ['__mentorship_program_projects.name'], name='mentorship_program_website_has_updated_project_fkey'),
+    sa.PrimaryKeyConstraint('id', name='mentorship_program_webite_has_updated_pkey'),
+    sa.UniqueConstraint('project_folder', name='mentorship_program_website_has_updated_project_folder_key')
+    )
+    op.create_table('unlisted_tickets',
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('complexity', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('project_category', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('project_sub_category', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('reqd_skills', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('issue_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('api_endpoint_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('ticket_points', sa.SMALLINT(), server_default=sa.text("'0'::smallint"), autoincrement=False, nullable=True, comment='How many points the ticket is worth'),
+    sa.Column('index', sa.SMALLINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('mentors', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('uuid', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('organization', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('uuid', 'issue_id', name='unlisted_tickets_pkey'),
+    sa.UniqueConstraint('api_endpoint_url', name='unlisted_tickets_api_endpoint_url_key'),
+    sa.UniqueConstraint('index', name='unlisted_tickets_index_key'),
+    sa.UniqueConstraint('issue_id', name='unlisted_tickets_"issue_id"_key'),
+    sa.UniqueConstraint('url', name='unlisted_tickets_url_key')
+    )
+    op.create_table('issue_mentors',
+    sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)"), autoincrement=True, nullable=False),
+    sa.Column('issue_id', sa.INTEGER(), autoincrement=False, nullable=False),
+    sa.Column('angel_mentor_id', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('org_mentor_id', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['angel_mentor_id'], ['contributors_registration.id'], name='issue_mentors_mentor_fkey'),
+    sa.ForeignKeyConstraint(['issue_id'], ['issues.id'], name='issue_mentors_issue_fkey'),
+    sa.PrimaryKeyConstraint('id', name='issue_mentors_pkey'),
+    sa.UniqueConstraint('issue_id', name='unique_issue_id_mentors')
+    )
+    op.create_table('dmp_issue_updates',
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False),
+    sa.Column('body_text', sa.TEXT(), server_default=sa.text("''::text"), autoincrement=False, nullable=True),
+    sa.Column('comment_link', sa.TEXT(), server_default=sa.text("''::text"), autoincrement=False, nullable=True),
+    sa.Column('comment_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('comment_api', sa.VARCHAR(), server_default=sa.text("''::character varying"), autoincrement=False, nullable=True),
+    sa.Column('comment_updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('dmp_id', sa.BIGINT(), server_default=sa.text("'1'::bigint"), autoincrement=False, nullable=False),
+    sa.Column('created_by', sa.TEXT(), server_default=sa.text("'NA'::text"), autoincrement=False, nullable=False),
+    sa.ForeignKeyConstraint(['dmp_id'], ['dmp_issues.id'], name='dmp_issue_updates_dmp_id_fkey', onupdate='CASCADE', ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('comment_id', name='dmp_issue_updates_pkey'),
+    sa.UniqueConstraint('comment_id', name='dmp_issue_updates_comment_id_key'),
+    comment='Having records of dmp with issue details'
+    )
+    op.create_table('dmp_issues',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('issue_url', sa.VARCHAR(), server_default=sa.text("'NA'::character varying"), autoincrement=False, nullable=False),
+    sa.Column('issue_number', sa.BIGINT(), server_default=sa.text("'1'::bigint"), autoincrement=False, nullable=False),
+    sa.Column('mentor_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('contributor_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('title', sa.TEXT(), server_default=sa.text("'NA'::text"), autoincrement=False, nullable=False),
+    sa.Column('org_id', sa.BIGINT(), server_default=sa.text("'1'::bigint"), autoincrement=False, nullable=False),
+    sa.Column('description', sa.TEXT(), server_default=sa.text("'NA'::text"), autoincrement=False, nullable=False),
+    sa.Column('repo', sa.TEXT(), server_default=sa.text("'NA'::text"), autoincrement=False, nullable=False),
+    sa.Column('repo_owner', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['org_id'], ['dmp_orgs.id'], name='dmp_issues_org_id_fkey', onupdate='CASCADE', ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id', name='dmp_issues_pkey'),
+    sa.UniqueConstraint('id', name='dmp_issues_dmp_id_key'),
+    postgresql_ignore_search_path=False
+    )
+    op.create_table('dmp_week_updates',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('week', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('total_task', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('completed_task', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('progress', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
+    sa.Column('task_data', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('dmp_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['dmp_id'], ['dmp_issues.id'], name='dmp_week_updates_dmp_id_fkey', onupdate='CASCADE', ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id', name='dmp_week_updates_pkey')
+    )
+    op.create_table('contributor_points',
+    sa.Column('id', sa.BIGINT(), autoincrement=True, nullable=False),
+    sa.Column('user_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('total_points', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=False),
+    sa.ForeignKeyConstraint(['user_id'], ['contributors_registration.id'], name='contributor_points_contributors_id_fkey'),
+    sa.PrimaryKeyConstraint('id', name='contributor_points_pkey')
+    )
+    op.create_table('mentorship_program_site_structure',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('project', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('contributor', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('website directory_label', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('directory_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['contributor'], ['__mentorship_program_selected_contributors.name'], name='mentorship_program_site_structure_contributor_fkey'),
+    sa.ForeignKeyConstraint(['product'], ['product.name'], name='mentorship_program_site_structure_product_fkey'),
+    sa.ForeignKeyConstraint(['project'], ['__mentorship_program_projects.name'], name='mentorship_program_site_structure_project_fkey'),
+    sa.PrimaryKeyConstraint('id', name='mentorship_program_site_structure_pkey'),
+    comment='a mapping for the milestones website structure'
+    )
+    op.create_table('mentorship_program_website_pull_request',
+    sa.Column('pr_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('pr_node_id', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('title', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('raised_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('body', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('merged_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('assignees', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('requested_reviewers', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('labels', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('review_comments_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('comments_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('repository_id', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('repository_owner_name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('repository_owner_id', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('repository_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('number_of_commits', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('lines_of_code_added', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('lines_of_code_removed', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('number_of_files_changed', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('merged_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('linked_ticket', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('project_name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('project_folder_label', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('week_number', sa.SMALLINT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('pr_id', name='mentorship_program_website_pull_request_pkey'),
+    sa.UniqueConstraint('pr_node_id', name='mentorship_program_website_pull_request_pr_node_id_key')
+    )
+    op.create_table('mentor_details',
+    sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('mentor_details_id_seq'::regclass)"), autoincrement=True, nullable=False),
+    sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('email', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('discord_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('discord_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('github_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='mentor_details_pkey'),
+    postgresql_ignore_search_path=False
+    )
+    op.create_table('dmp_pr_updates',
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False),
+    sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('status', sa.VARCHAR(), server_default=sa.text("''::character varying"), autoincrement=False, nullable=False),
+    sa.Column('title', sa.TEXT(), server_default=sa.text("''::text"), autoincrement=False, nullable=False),
+    sa.Column('pr_updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('merged_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('closed_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('dmp_id', sa.BIGINT(), server_default=sa.text("'1'::bigint"), autoincrement=False, nullable=False),
+    sa.Column('link', sa.TEXT(), server_default=sa.text("'NA'::text"), autoincrement=False, nullable=False),
+    sa.ForeignKeyConstraint(['dmp_id'], ['dmp_issues.id'], name='dmp_pr_updates_dmp_id_fkey', onupdate='CASCADE', ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('pr_id', name='dmp_pr_updates_pkey'),
+    sa.UniqueConstraint('pr_id', name='dmp_pr_updates_pr_id_key'),
+    comment='Having PR related records'
+    )
+    op.create_table('__community_program_product_wise_tickets',
+    sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
+    sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('gh_organisation', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('repository name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='community_program_tickets_duplicate_pkey')
+    )
+    op.create_table('__community_program_unique_user_data',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('ticket_name', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('linked_pr', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('linked_pr_author_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('linked_pr_author_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('is_registered', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('ticket_link', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('linked_pr_link', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('state', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='community_program_unique_user_data_pkey')
+    )
+    op.create_table('point_system',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('complexity', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('points', sa.SMALLINT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='point_system_pkey')
+    )
+    op.create_table('user_badges',
+    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False),
+    sa.Column('badge_id', sa.INTEGER(), autoincrement=False, nullable=False),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['badge_id'], ['badges.id'], name='user_badges_badge_id_fkey'),
+    sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='user_badges_user_id_fkey'),
+    sa.PrimaryKeyConstraint('id', name='user_badges_pkey')
+    )
+    op.create_table('__dashboard_config',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('dashboard', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('starting date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='dashboard _config_pkey')
+    )
+    op.create_table('dmp_tickets',
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('complexity', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('project_category', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('project_sub_category', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('reqd_skills', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('issue_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('api_endpoint_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('ticket_points', sa.SMALLINT(), server_default=sa.text("'0'::smallint"), autoincrement=False, nullable=True, comment='How many points the ticket is worth'),
+    sa.Column('index', sa.SMALLINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('mentors', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('uuid', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True, comment='has community label'),
+    sa.Column('organization', sa.TEXT(), server_default=sa.text("''::text"), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('uuid', name='dmp_tickets_pkey'),
+    sa.UniqueConstraint('api_endpoint_url', name='dmp_tickets_api_endpoint_url_key'),
+    sa.UniqueConstraint('index', name='dmp_tickets_index_key'),
+    sa.UniqueConstraint('issue_id', name='dmp_tickets_"issue_id"_key'),
+    sa.UniqueConstraint('url', name='dmp_tickets_url_key')
+    )
+    op.create_table('badges',
+    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('image', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('text', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='badges_pkey')
+    )
+    op.create_table('github_organisations_to_organisations',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('github_organisation', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True, comment='creation date of organization ticket'),
+    sa.PrimaryKeyConstraint('id', name='github_organisations_to_organisations_pkey')
+    )
+    op.create_table('__mentorship_program_selected_contributors',
+    sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('project_name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='mentorship_program_selected_contributors_pkey'),
+    sa.UniqueConstraint('name', name='mentorship_program_selected_contributors_name_key'),
+    comment='List of contributors selected for C4GT Mentorship Program 2023'
+    )
+    op.create_table('community_orgs',
+    sa.Column('id', sa.BIGINT(), server_default=sa.text("nextval('community_orgs_id_seq'::regclass)"), autoincrement=True, nullable=False),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.PrimaryKeyConstraint('id', name='community_orgs_pkey'),
+    postgresql_ignore_search_path=False
+    )
+    op.create_table('points_mapping',
+    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('role', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('complexity', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('points', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='points_mapping_pkey')
+    )
+    op.create_table('mentorship_website_contributor_project',
+    sa.Column('project_folder', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('contributor', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('project_folder', name='mentorship_website_contributor_project_pkey')
+    )
+    op.create_table('github_installations',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('github_organisation', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('installation_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('target_type', sa.TEXT(), autoincrement=False, nullable=True, comment='Type of github entity that installed the app, usually "Organisation"'),
+    sa.Column('github_ids', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True, comment="identifiers on the github database, prolly won't be used"),
+    sa.Column('permissions_and_events', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['organisation'], ['__community_organisations.name'], name='github_installations_organisation_fkey', onupdate='CASCADE'),
+    sa.PrimaryKeyConstraint('id', name='__github_installations_pkey'),
+    sa.UniqueConstraint('github_organisation', name='__github_installations_organisation_key'),
+    sa.UniqueConstraint('installation_id', name='__github_installations_installation_id_key')
+    )
+    op.create_table('ticket_comments',
+    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('issue_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('comment_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='ticket_comments_pkey')
+    )
+    op.create_table('product',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('name', sa.TEXT(), server_default=sa.text("''::text"), autoincrement=False, nullable=False),
+    sa.Column('description', sa.TEXT(), server_default=sa.text("''::text"), autoincrement=False, nullable=True, comment='URL to the product entry on C4GT wiki'),
+    sa.Column('wiki_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('channel', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['channel'], ['discord_channels.channel_id'], name='product_channel_fkey'),
+    sa.PrimaryKeyConstraint('id', name='products_pkey'),
+    sa.UniqueConstraint('name', name='products_product_name_key'),
+    comment="A table containing all 'Products' in C4GT 2023"
+    )
+    op.create_table('contributors_registration_old',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=False),
+    sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False),
+    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='contributors_registration_duplicate_pkey'),
+    sa.UniqueConstraint('discord_id', name='contributors_registration_duplicate_discord_id_key'),
+    sa.UniqueConstraint('github_id', name='contributors_registration_duplicate_github_id_key')
+    )
+    op.create_table('user_certificates',
+    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False),
+    sa.Column('certificate_link', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='user_certificates_user_id_fkey'),
+    sa.PrimaryKeyConstraint('id', name='user_certificates_pkey')
+    )
+    op.create_table('app_comments',
+    sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
+    sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('comment_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('issue_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.PrimaryKeyConstraint('id', name='app_comments_pkey'),
+    sa.UniqueConstraint('issue_id', name='app_comments_issue_id_key')
+    )
+    op.create_table('__contributors_discord',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False),
+    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column(' name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('chapter', sa.TEXT(), autoincrement=False, nullable=True, comment="the chapter they're associated with"),
+    sa.Column('gender', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='__contributors_pkey'),
+    sa.UniqueConstraint('discord_id', name='__contributors_discord_id_key')
+    )
+    op.create_table('issue_contributors',
+    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('issue_id', sa.INTEGER(), autoincrement=False, nullable=False),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('contributor_id', sa.INTEGER(), autoincrement=False, nullable=False),
+    sa.Column('role', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['contributor_id'], ['contributors_registration.id'], name='issue_contributors_contributor_id_fkey', onupdate='CASCADE', ondelete='CASCADE'),
+    sa.ForeignKeyConstraint(['issue_id'], ['issues.id'], name='issue_contributors_issue_id_fkey'),
+    sa.PrimaryKeyConstraint('id', name='issue_contributors_pkey'),
+    sa.UniqueConstraint('issue_id', name='unique_issue_id_contributors')
+    )
+    op.create_table('vc_logs',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False),
+    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('discord_name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('option', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='vc_logs_pkey')
+    )
+    op.create_table('__mentorship_program_ticket_comments',
+    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='mentorship_program_ticket_comments_pkey')
+    )
+    op.create_table('contributor_names',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('country', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='contributor_names_pkey')
+    )
+    op.create_table('user_activity',
+    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+    sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False),
+    sa.Column('issue_id', sa.INTEGER(), autoincrement=False, nullable=False),
+    sa.Column('activity', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
+    sa.Column('mentor_id', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['issue_id'], ['issues.id'], name='user_activity_issue_id_fkey'),
+    sa.ForeignKeyConstraint(['mentor_id'], ['mentor_details.id'], name='user_activity_mentor_id_fkey', onupdate='CASCADE', ondelete='CASCADE'),
+    sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='user_activity_user_id_fkey'),
+    sa.PrimaryKeyConstraint('id', name='user_activity_pkey')
+    )
+    op.create_table('dmp_orgs',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('description', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('link', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('repo_owner', sa.TEXT(), server_default=sa.text("'NA'::text"), autoincrement=False, nullable=False),
+    sa.PrimaryKeyConstraint('id', name='dmp_orgs_pkey'),
+    sa.UniqueConstraint('id', name='dmp_orgs_id_key')
+    )
+    op.create_table('contributors_registration',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=False),
+    sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False),
+    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='contributors_registration_old_duplicate_pkey'),
+    sa.UniqueConstraint('discord_id', name='contributors_registration_old_duplicate_discord_id_key'),
+    sa.UniqueConstraint('github_id', name='contributors_registration_old_duplicate_github_id_key'),
+    comment='This is a duplicate of contributors_registration_old'
+    )
+    op.create_table('pr_staging',
+    sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
+    sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('raised_by', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('raised_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False),
+    sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('is_merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('merged_by', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('merged_at', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='github id of the pr'),
+    sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False),
+    sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('ticket_complexity', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='pr_staging_pkey'),
+    sa.UniqueConstraint('html_url', name='pr_staging_html_url_key'),
+    comment='This is a duplicate of connected_prs'
+    )
+    op.create_table('github_classroom_data',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False),
+    sa.Column('assignment_name', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('assignment_url', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('assignment_id', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('starter_code_url', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('roster_identifier', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('student_repository_name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('student_repository_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('submission_timestamp', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False),
+    sa.Column('points_awarded', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('points_available', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('c4gt_points', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('discord_id', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='github_classroom_data_pkey'),
+    comment='Table for save the details about github classroom assignment datas'
+    )
+    op.create_table('connected_prs',
+    sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
+    sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('raised_by', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('raised_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False),
+    sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('is_merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('merged_by', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('merged_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='github id of the pr'),
+    sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False),
+    sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('ticket_complexity', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='connected_prs_pkey'),
+    sa.UniqueConstraint('html_url', name='connected_prs_html_url_key')
+    )
+    op.create_table('__mentors',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['organisation'], ['__community_organisations.name'], name='__mentors_organisation_fkey'),
+    sa.PrimaryKeyConstraint('id', name='mentors_pkey')
+    )
+    op.create_table('discord_engagement',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
+    sa.Column('contributor', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('has_introduced', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
+    sa.Column('total_message_count', sa.BIGINT(), server_default=sa.text("'0'::bigint"), autoincrement=False, nullable=True),
+    sa.Column('total_reaction_count', sa.BIGINT(), server_default=sa.text("'0'::bigint"), autoincrement=False, nullable=True),
+    sa.Column('converserBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
+    sa.Column('apprenticeBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
+    sa.Column('rockstarBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
+    sa.Column('enthusiastBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
+    sa.Column('risingStarBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='discord_engagement_pkey'),
+    sa.UniqueConstraint('contributor', name='discord_engagement_contributor_key'),
+    comment='engagement metrics for contributors'
+    )
+    op.create_table('__dev_onboarding',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('repos', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='Onboarding_Dev_pkey'),
+    sa.UniqueConstraint('organisation', name='Onboarding_Dev_org_key')
+    )
+    op.create_table('chapters',
+    sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+    sa.Column('type', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('org_name', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('primary_organisation', sa.TEXT(), autoincrement=False, nullable=True, comment='the organisation that the chapter is mapped to'),
+    sa.Column('sessions', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('discord_role_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='db od of the corresponding member role in discord server'),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.CheckConstraint("type = ANY (ARRAY['CORPORATE'::text, 'COLLEGE'::text])", name='chapters_type_check'),
+    sa.PrimaryKeyConstraint('id', name='chapters_pkey'),
+    sa.UniqueConstraint('discord_role_id', name='chapters_discord_role_id_key'),
+    sa.UniqueConstraint('org_name', name='chapters_org_name_key')
+    )
+    op.create_table('__community_program_tickets',
+    sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='community_program_tickets_pkey')
+    )
+    op.create_table('mentorship_program_website_commits',
+    sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('comment_count', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('author_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('author_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('author_email', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('committer_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('committer_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('committer_email', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('additions', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('deletions', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('files', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True),
+    sa.Column('project_folder_name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('node_id', name='mentorship_program_website_commits_pkey')
+    )
+    op.create_table('pr_history',
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
+    sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('raised_by', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('raised_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False),
+    sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('is_merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('merged_by', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('merged_at', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='github id of the pr'),
+    sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False),
+    sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('ticket_complexity', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('pr_history_id_seq'::regclass)"), autoincrement=True, nullable=False),
+    sa.PrimaryKeyConstraint('id', name='pr_history_pkey'),
+    sa.UniqueConstraint('html_url', name='pr_history_html_url_key'),
+    comment='Holds records of pr webhooks'
+    )
+    op.create_table('ccbp_tickets',
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('complexity', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('project_category', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('project_sub_category', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('reqd_skills', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('issue_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('api_endpoint_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('ticket_points', sa.SMALLINT(), server_default=sa.text("'0'::smallint"), autoincrement=False, nullable=True, comment='How many points the ticket is worth'),
+    sa.Column('index', sa.SMALLINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('mentors', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('uuid', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True, comment='has community label'),
+    sa.Column('organization', sa.TEXT(), server_default=sa.text("''::text"), autoincrement=False, nullable=True),
+    sa.Column('closed_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True, comment='date-time at which issue was closed'),
+    sa.Column('assignees', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('issue_author', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('is_assigned', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
+    sa.PrimaryKeyConstraint('uuid', name='ccbp_tickets_pkey'),
+    sa.UniqueConstraint('api_endpoint_url', name='ccbp_tickets_api_endpoint_url_key'),
+    sa.UniqueConstraint('index', name='ccbp_tickets_index_key'),
+    sa.UniqueConstraint('issue_id', name='ccbp_tickets_"issue_id"_key'),
+    sa.UniqueConstraint('url', name='ccbp_tickets_url_key'),
+    comment='A table to store details of CCBP Tickets from various projects'
+    )
+    # ### end Alembic commands ###

diff --git a/migrations/versions/__pycache__/e7eab9b29e41_initial_migration.cpython-310.pyc b/migrations/versions/__pycache__/5709cf5c2772_baseline_migration.cpython-310.pyc
similarity index 52%
rename from migrations/versions/__pycache__/e7eab9b29e41_initial_migration.cpython-310.pyc
rename to migrations/versions/__pycache__/5709cf5c2772_baseline_migration.cpython-310.pyc
index c145ec14d52dae01d499664815eb1092f44d35c5..ad46824b23d9cb43023b6e321d68bbc5fcf20809 100644
GIT binary patch
delta 114
[... base85-encoded binary data omitted: compiled .pyc bytecode ...]
delta 113
[... base85-encoded binary data omitted ...]

diff --git a/migrations/versions/__pycache__/947179aaa97c_version_added.cpython-310.pyc b/migrations/versions/__pycache__/947179aaa97c_version_added.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..276a98769bdd9285dfbaeb83fc8465df175872e9
GIT binary patch
literal 32678
[... base85-encoded binary data omitted: compiled .pyc bytecode ...]
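With the autogenerated create_table calls above, the revision is complete. The usual next step is to apply it and confirm that db/models.py and the migrated schema agree. The sketch below uses Alembic's Python API; the alembic.ini path is an assumption about this repo's layout, not part of the patch, and with Flask-Migrate the equivalent CLI commands are "flask db upgrade" and "flask db migrate":

    # Sketch only: apply the baseline, then autogenerate a throwaway
    # revision; an empty revision means models and schema are in sync.
    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")   # hypothetical config path
    command.upgrade(cfg, "head")  # runs the create_table calls above
    command.revision(cfg, message="drift-check", autogenerate=True)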
[GIT binary patch data omitted: base85-encoded .pyc delta from the preceding patch, not human-readable]

From 89d70bf321985490def04dedcd461232ca15dcab Mon Sep 17 00:00:00 2001
From: jaanbaaz
Date: Sun, 22 Dec 2024 21:08:19 +0530
Subject: [PATCH 18/32] Model and migration changes

---
 db/__pycache__/models.cpython-310.pyc         |  Bin 44176 -> 44062 bytes
 db/models.py                                  |    8 +-
 migrations/__pycache__/env.cpython-310.pyc    |  Bin 1963 -> 1970 bytes
 migrations/env.py                             |   35 +-
 .../5709cf5c2772_baseline_migration.py        |   30 -
 .../8d1e6a7e959a_initial_migration.py         | 1723 +++++++++++++++++
 .../versions/947179aaa97c_version_added.py    | 1004 ----------
 ...f5c2772_baseline_migration.cpython-310.pyc |  Bin 915 -> 0 bytes
 ...6a7e959a_initial_migration.cpython-310.pyc |  Bin 0 -> 32127 bytes
 ...947179aaa97c_version_added.cpython-310.pyc |  Bin 32678 -> 0 bytes
 10 files changed, 1751 insertions(+), 1049 deletions(-)
 delete mode 100644 migrations/versions/5709cf5c2772_baseline_migration.py
 create mode 100644 migrations/versions/8d1e6a7e959a_initial_migration.py
 delete mode 100644 migrations/versions/947179aaa97c_version_added.py
 delete mode 100644 migrations/versions/__pycache__/5709cf5c2772_baseline_migration.cpython-310.pyc
 create mode 100644 migrations/versions/__pycache__/8d1e6a7e959a_initial_migration.cpython-310.pyc
 delete mode 100644 migrations/versions/__pycache__/947179aaa97c_version_added.cpython-310.pyc
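
The migrations/env.py change in this patch points autogenerate at
Base.metadata and enables compare_type=True. In Alembic releases before
1.12, type comparison is off by default, so drift between a model's column
type and the live database column (for example points_mapping.id declared
as BigInteger while the database still has INTEGER) produces no
autogenerated change until compare_type is switched on; that is what emits
the long run of op.alter_column(..., type_=...) calls in the new initial
migration. A minimal sketch of the pattern, using a simplified model that
mirrors the points_mapping change (hypothetical code, not part of this
patch; assumes SQLAlchemy 1.4+ and Alembic are installed):

    from sqlalchemy import BigInteger, Column, String
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class PointsMapping(Base):
        """Model declares BigInteger/String(50); the DB still has INTEGER/TEXT."""
        __tablename__ = 'points_mapping'
        id = Column(BigInteger, primary_key=True)
        role = Column(String(50))

    # env.py then configures the migration context against this metadata:
    #     context.configure(connection=connection,
    #                       target_metadata=Base.metadata,
    #                       compare_type=True)
    # after which `alembic revision --autogenerate` emits alter_column
    # entries such as the points_mapping type changes seen below.
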
diff --git a/db/__pycache__/models.cpython-310.pyc b/db/__pycache__/models.cpython-310.pyc
index 04005c38f8303e70f1f4bf20f5c6bfd9ca5663dc..860a0bba3b49bbb876d8da094cc026becbc346f2 100644
GIT binary patch
[base85 deltas (2486 and 2607 bytes) omitted: recompiled Python bytecode, not human-readable]

diff --git a/db/models.py b/db/models.py
index 13c38ea..2a82f37 100644
--- a/db/models.py
+++ b/db/models.py
@@ -578,7 +578,7 @@ class GithubInstallations(Base):
     github_ids = Column(Text, nullable=True, comment="Identifiers on the github database, prolly won't be used")
     permissions_and_events = Column(Text, nullable=True)
     created_at = Column(DateTime, nullable=True)
-    organisation = Column(Text, ForeignKey('community_organisations.name'), nullable=True)
+    organisation = Column(Text, ForeignKey('community_orgs.name'), nullable=True)
 
     def __repr__(self):
         return f""
@@ -747,8 +747,8 @@ class MentorshipProgramSiteStructure(Base):
 
     id = Column(BigInteger, primary_key=True)
     product_id = Column(BigInteger, ForeignKey('product.id'), nullable=True)
-    project_id = Column(BigInteger, ForeignKey('mentorship_program_projects.id'), nullable=True)
-    contributor_id = Column(BigInteger, ForeignKey('mentorship_program_selected_contributors.id'), nullable=True)
+    project_id = Column(BigInteger, nullable=True)
+    contributor_id = Column(BigInteger, nullable=True)
     website_directory_label = Column(Text, nullable=True)
     directory_url = Column(Text, nullable=True)
@@ -844,7 +844,7 @@ class MentorshipProgramWebsiteHasUpdated(Base):
     __tablename__ = 'mentorship_program_website_has_updated'
 
     id = Column(BigInteger, primary_key=True)
-    project_id = Column(BigInteger, ForeignKey('mentorship_program_projects.id'), nullable=True)
+    project_id = Column(BigInteger, nullable=True)
     week1_update_date = Column(DateTime, nullable=True)
     week2_update_date = Column(DateTime, nullable=True)
     week3_update_date = Column(DateTime, nullable=True)
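
A side effect of autogenerating against db.models: Alembic renders column
types that are not plain sqlalchemy types with their defining module path,
which is why the generated revision below contains db.models.DateTime()
and needs an `import db.models` at the top to run. The custom type is
assumed to be a thin TypeDecorator along these lines (a sketch only; the
real definition lives in db/models.py and may differ):

    from sqlalchemy import types

    class DateTime(types.TypeDecorator):
        """Assumed shape of the custom db.models.DateTime type."""
        impl = types.DateTime  # delegate storage to the standard DateTime
        cache_ok = True        # lets SQLAlchemy 1.4+ cache the type safely

Without that import, `alembic upgrade head` would fail on this revision
with NameError: name 'db' is not defined at the first db.models.DateTime().
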
diff --git a/migrations/__pycache__/env.cpython-310.pyc b/migrations/__pycache__/env.cpython-310.pyc
index 6fb27f85f28e4210f412a115b7bebe4dc0613c13..146b573f32df191d8c9e30c45598eac7ff7a1849 100644
GIT binary patch
[base85 deltas (747 and 812 bytes) omitted: recompiled Python bytecode, not human-readable]

diff --git a/migrations/env.py b/migrations/env.py
index 040306a..7961305 100644
--- a/migrations/env.py
+++ b/migrations/env.py
@@ -2,7 +2,7 @@
 from sqlalchemy import engine_from_config
 from sqlalchemy import pool
 
-from db.models import shared_metadata
+from db.models import shared_metadata, Base
 
 from alembic import context
 
@@ -26,7 +26,8 @@
 # for 'autogenerate' support
 # from myapp import mymodel
 # target_metadata = mymodel.Base.metadata
-target_metadata = shared_metadata
+# target_metadata = shared_metadata
+target_metadata = Base.metadata
 
 # other values from the config, defined by the needs of env.py,
 # can be acquired:
@@ -65,16 +66,28 @@ def run_migrations_online() -> None:
     and associate a connection with the context.
 
     """
-    connectable = engine_from_config(
-        config.get_section(config.config_ini_section, {}),
-        prefix="sqlalchemy.",
-        poolclass=pool.NullPool,
-    )
-
-    with connectable.connect() as connection:
+    # connectable = engine_from_config(
+    #     config.get_section(config.config_ini_section, {}),
+    #     prefix="sqlalchemy.",
+    #     poolclass=pool.NullPool,
+    # )
+
+    # with connectable.connect() as connection:
+    #     context.configure(
+    #         connection=connection, target_metadata=target_metadata
+    #     )
+
+    #     with context.begin_transaction():
+    #         context.run_migrations()
+    engine = engine_from_config(
+        config.get_section(config.config_ini_section), prefix='sqlalchemy.')
+
+    with engine.connect() as connection:
         context.configure(
-            connection=connection, target_metadata=target_metadata
-        )
+            connection=connection,
+            target_metadata=target_metadata,
+            compare_type=True
+        )
 
         with context.begin_transaction():
             context.run_migrations()
diff --git a/migrations/versions/5709cf5c2772_baseline_migration.py b/migrations/versions/5709cf5c2772_baseline_migration.py
deleted file mode 100644
index 0f39531..0000000
--- a/migrations/versions/5709cf5c2772_baseline_migration.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""Baseline migration
-
-Revision ID: 5709cf5c2772
-Revises: 
-Create Date: 2024-12-06 12:44:27.108268
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-# revision identifiers, used by Alembic.
-revision: str = '5709cf5c2772'
-down_revision: Union[str, None] = None
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    pass
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    pass
-    # ### end Alembic commands ###
diff --git a/migrations/versions/8d1e6a7e959a_initial_migration.py b/migrations/versions/8d1e6a7e959a_initial_migration.py
new file mode 100644
index 0000000..db77404
--- /dev/null
+++ b/migrations/versions/8d1e6a7e959a_initial_migration.py
@@ -0,0 +1,1723 @@
+"""Initial migration
+
+Revision ID: 8d1e6a7e959a
+Revises: 
+Create Date: 2024-12-18 18:12:00.911503
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+import db.models  # the autogenerated columns below reference db.models.DateTime
+
+# revision identifiers, used by Alembic.
+revision: str = '8d1e6a7e959a'
+down_revision: Union[str, None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('github_profile_data',
+    sa.Column('github_username', sa.String(), nullable=False),
+    sa.Column('discord_id', sa.BigInteger(), nullable=False),
+    sa.Column('classroom_points', sa.Integer(), nullable=False),
+    sa.Column('prs_raised', sa.Integer(), nullable=False),
+    sa.Column('prs_reviewed', sa.Integer(), nullable=False),
+    sa.Column('prs_merged', sa.Integer(), nullable=False),
+    sa.Column('dpg_points', sa.Integer(), nullable=False),
+    sa.Column('milestone', sa.Integer(), nullable=False),
+    sa.PrimaryKeyConstraint('github_username')
+    )
+    op.create_table('leaderboard',
+    sa.Column('discord_id', sa.BigInteger(), autoincrement=False, nullable=False),
+    sa.Column('github_id', sa.BigInteger(), nullable=False),
+    sa.Column('github_url', sa.Text(), nullable=False),
+    sa.Column('apprentice_badge', sa.Boolean(), nullable=True),
+    sa.Column('converser_badge', sa.Boolean(), nullable=False),
+    sa.Column('rockstar_badge', sa.Boolean(), nullable=False),
+    sa.Column('enthusiast_badge', sa.Boolean(), nullable=False),
+    sa.Column('rising_star_badge', sa.Boolean(), nullable=False),
+    sa.Column('github_x_discord_badge', sa.Boolean(), nullable=False),
+    sa.Column('points', sa.Integer(), nullable=False),
+    sa.Column('bronze_badge', sa.Boolean(), nullable=False),
+    sa.Column('silver_badge', sa.Boolean(), nullable=False),
+    sa.Column('gold_badge', sa.Boolean(), nullable=False),
+    sa.Column('ruby_badge', sa.Boolean(), nullable=False),
+    sa.Column('diamond_badge', sa.Boolean(), nullable=False),
+    sa.Column('certificate_link', sa.Text(), nullable=True),
+    sa.PrimaryKeyConstraint('discord_id')
+    )
+    op.create_table('role_master',
+    sa.Column('id', sa.BigInteger(), nullable=False),
+    sa.Column('created_at', db.models.DateTime(), nullable=False),
+    sa.Column('updated_at', db.models.DateTime(), nullable=True),
+    sa.Column('role', sa.Text(), nullable=True),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_table('unstructured_discord_data',
+    sa.Column('text', sa.Text(), nullable=True),
+    sa.Column('author', sa.BigInteger(), nullable=True),
+    sa.Column('channel', sa.BigInteger(), nullable=True),
+    sa.Column('channel_name', sa.Text(), nullable=True),
+    sa.Column('uuid', sa.String(length=36), nullable=False),
+    sa.Column('author_name', sa.Text(), nullable=True),
+    sa.Column('author_roles', sa.Text(), nullable=True),
+    sa.Column('sent_at', sa.Text(), nullable=True),
+    sa.PrimaryKeyConstraint('uuid')
+    )
+    op.create_table('user_points_mapping',
+    sa.Column('id', sa.UUID(), nullable=False),
+    sa.Column('contributor', sa.BigInteger(), 
nullable=True), + sa.Column('points', sa.Integer(), nullable=False), + sa.Column('level', sa.String(length=50), nullable=True), + sa.Column('created_at', db.models.DateTime(), nullable=False), + sa.Column('updated_at', db.models.DateTime(), nullable=False), + sa.Column('mentor_id', sa.BigInteger(), nullable=True), + sa.ForeignKeyConstraint(['contributor'], ['contributors_registration.id'], ), + sa.ForeignKeyConstraint(['mentor_id'], ['mentor_details.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.drop_table('__contributors_vc') + op.drop_table('__mentors') + op.drop_table('__mentorship_program_ticket_comments') + op.drop_table('__mentorship_program_pull_request') + op.drop_table('__mentorship_program_tickets') + op.drop_table('__community_program_unique_user_data') + op.drop_table('__contributors_discord') + op.drop_table('__applicant') + op.drop_table('__dashboard_config') + op.drop_table('__mentorship_program_projects') + op.drop_table('__comments') + op.drop_table('__dev_onboarding') + op.drop_table('contributors_registration_old') + op.drop_table('__pull_requests') + op.drop_table('__community_program_tickets') + op.drop_table('__community_organisations') + op.drop_table('__mentorship_program_selected_contributors') + op.drop_table('__community_program_product_wise_tickets') + op.drop_table('unstructured discord data') + op.alter_column('app_comments', 'id', + existing_type=sa.UUID(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text('gen_random_uuid()')) + op.alter_column('app_comments', 'updated_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('now()')) + op.alter_column('app_comments', 'issue_id', + existing_type=sa.BIGINT(), + nullable=True) + op.alter_column('badges', 'id', + existing_type=sa.INTEGER(), + type_=sa.UUID(), + existing_nullable=False) + op.alter_column('badges', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('badges', 'updated_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('ccbp_tickets', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('now()')) + op.alter_column('ccbp_tickets', 'issue_id', + existing_type=sa.BIGINT(), + nullable=True) + op.alter_column('ccbp_tickets', 'index', + existing_type=sa.SMALLINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.alter_column('ccbp_tickets', 'closed_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_comment='date-time at which issue was closed', + existing_nullable=True) + op.alter_column('chapters', 'org_name', + existing_type=sa.TEXT(), + nullable=True) + op.alter_column('chapters', 'discord_role_id', + existing_type=sa.BIGINT(), + nullable=True, + comment='db id of the corresponding member role in discord server', + existing_comment='db od of the corresponding member role in discord server') + op.alter_column('chapters', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('community_orgs', 'name', + 
existing_type=sa.TEXT(), + nullable=True) + op.alter_column('connected_prs', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('now()')) + op.alter_column('connected_prs', 'raised_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False) + op.alter_column('connected_prs', 'merged_at', + existing_type=postgresql.TIMESTAMP(), + type_=sa.Text(), + existing_nullable=True) + op.alter_column('contributor_names', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.add_column('contributor_points', sa.Column('contributors_id', sa.BigInteger(), nullable=True)) + op.drop_constraint('contributor_points_contributors_id_fkey', 'contributor_points', type_='foreignkey') + op.create_foreign_key(None, 'contributor_points', 'contributors_registration', ['contributors_id'], ['id']) + op.drop_column('contributor_points', 'user_id') + op.alter_column('contributors_discord', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.alter_column('contributors_discord', 'joined_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text('now()')) + op.drop_column('contributors_discord', 'city') + op.drop_column('contributors_discord', 'country') + op.drop_column('contributors_discord', 'experience') + op.alter_column('contributors_registration', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.alter_column('contributors_registration', 'joined_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text('now()')) + op.drop_table_comment( + 'contributors_registration', + existing_comment='This is a duplicate of contributors_registration_old', + schema=None + ) + op.add_column('discord_engagement', sa.Column('converserbadge', sa.Boolean(), nullable=True)) + op.add_column('discord_engagement', sa.Column('apprenticebadge', sa.Boolean(), nullable=True)) + op.add_column('discord_engagement', sa.Column('rockstarbadge', sa.Boolean(), nullable=True)) + op.add_column('discord_engagement', sa.Column('enthusiastbadge', sa.Boolean(), nullable=True)) + op.add_column('discord_engagement', sa.Column('risingstarbadge', sa.Boolean(), nullable=True)) + op.alter_column('discord_engagement', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.alter_column('discord_engagement', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('now()')) + op.drop_column('discord_engagement', 'apprenticeBadge') + op.drop_column('discord_engagement', 'converserBadge') + op.drop_column('discord_engagement', 'risingStarBadge') + op.drop_column('discord_engagement', 'enthusiastBadge') + op.drop_column('discord_engagement', 'rockstarBadge') + op.alter_column('dmp_issue_updates', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text('now()')) + op.alter_column('dmp_issue_updates', 'comment_updated_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + 
existing_nullable=True) + op.drop_constraint('dmp_issue_updates_comment_id_key', 'dmp_issue_updates', type_='unique') + op.drop_constraint('dmp_issue_updates_dmp_id_fkey', 'dmp_issue_updates', type_='foreignkey') + op.create_foreign_key(None, 'dmp_issue_updates', 'dmp_issues', ['dmp_id'], ['id']) + op.alter_column('dmp_issues', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.drop_constraint('dmp_issues_dmp_id_key', 'dmp_issues', type_='unique') + op.drop_constraint('dmp_issues_org_id_fkey', 'dmp_issues', type_='foreignkey') + op.create_foreign_key(None, 'dmp_issues', 'dmp_orgs', ['org_id'], ['id']) + op.drop_column('dmp_issues', 'repo_owner') + op.add_column('dmp_orgs', sa.Column('version', sa.Text(), nullable=True)) + op.alter_column('dmp_orgs', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.alter_column('dmp_orgs', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text('now()')) + op.drop_constraint('dmp_orgs_id_key', 'dmp_orgs', type_='unique') + op.alter_column('dmp_pr_updates', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text('now()')) + op.alter_column('dmp_pr_updates', 'pr_updated_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('dmp_pr_updates', 'merged_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('dmp_pr_updates', 'closed_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.drop_constraint('dmp_pr_updates_pr_id_key', 'dmp_pr_updates', type_='unique') + op.drop_constraint('dmp_pr_updates_dmp_id_fkey', 'dmp_pr_updates', type_='foreignkey') + op.create_foreign_key(None, 'dmp_pr_updates', 'dmp_issues', ['dmp_id'], ['id']) + op.alter_column('dmp_tickets', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('now()')) + op.alter_column('dmp_tickets', 'ticket_points', + existing_type=sa.SMALLINT(), + type_=sa.Integer(), + existing_comment='How many points the ticket is worth', + existing_nullable=True, + existing_server_default=sa.text("'0'::smallint")) + op.alter_column('dmp_tickets', 'index', + existing_type=sa.SMALLINT(), + server_default=None, + type_=sa.Integer(), + existing_nullable=False, + autoincrement=True) + op.alter_column('dmp_week_updates', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.drop_constraint('dmp_week_updates_dmp_id_fkey', 'dmp_week_updates', type_='foreignkey') + op.alter_column('github_classroom_data', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.alter_column('github_classroom_data', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text('now()')) + op.alter_column('github_classroom_data', 'submission_timestamp', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False) + op.alter_column('github_classroom_data', 
'points_awarded', + existing_type=sa.VARCHAR(), + type_=sa.Integer(), + existing_nullable=True) + op.alter_column('github_classroom_data', 'points_available', + existing_type=sa.VARCHAR(), + type_=sa.Integer(), + existing_nullable=True) + op.create_table_comment( + 'github_classroom_data', + 'Table for saving the details about github classroom assignment data', + existing_comment='Table for save the details about github classroom assignment datas', + schema=None + ) + op.alter_column('github_installations', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.alter_column('github_installations', 'github_ids', + existing_type=postgresql.JSON(astext_type=sa.Text()), + type_=sa.Text(), + comment="Identifiers on the github database, prolly won't be used", + existing_comment="identifiers on the github database, prolly won't be used", + existing_nullable=True) + op.alter_column('github_installations', 'permissions_and_events', + existing_type=postgresql.JSON(astext_type=sa.Text()), + type_=sa.Text(), + existing_nullable=True) + op.alter_column('github_installations', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.drop_constraint('github_installations_organisation_fkey', 'github_installations', type_='foreignkey') + op.create_foreign_key(None, 'github_installations', 'community_orgs', ['organisation'], ['name']) + op.alter_column('github_organisations_to_organisations', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.alter_column('github_organisations_to_organisations', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + comment='Creation date of organization ticket', + existing_comment='creation date of organization ticket', + existing_nullable=True) + op.alter_column('issue_contributors', 'id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True) + op.alter_column('issue_contributors', 'contributor_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + nullable=True) + op.alter_column('issue_contributors', 'issue_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.alter_column('issue_contributors', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('issue_contributors', 'updated_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.drop_constraint('unique_issue_id_contributors', 'issue_contributors', type_='unique') + op.drop_constraint('issue_contributors_contributor_id_fkey', 'issue_contributors', type_='foreignkey') + op.create_foreign_key(None, 'issue_contributors', 'contributors_registration', ['contributor_id'], ['id']) + op.create_foreign_key(None, 'issue_contributors', 'role_master', ['role'], ['id']) + op.alter_column('issue_mentors', 'id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)")) + op.alter_column('issue_mentors', 'issue_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + nullable=True) + op.alter_column('issue_mentors', 
'angel_mentor_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=True) + op.drop_constraint('unique_issue_id_mentors', 'issue_mentors', type_='unique') + op.alter_column('issues', 'id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('issues_id_seq'::regclass)")) + op.drop_constraint('issues_org_id_fkey', 'issues', type_='foreignkey') + op.create_foreign_key(None, 'issues', 'community_orgs', ['org_id'], ['id']) + op.alter_column('mentor_details', 'id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('mentor_details_id_seq'::regclass)")) + op.alter_column('mentor_not_added', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.add_column('mentorship_program_site_structure', sa.Column('product_id', sa.BigInteger(), nullable=True)) + op.add_column('mentorship_program_site_structure', sa.Column('project_id', sa.BigInteger(), nullable=True)) + op.add_column('mentorship_program_site_structure', sa.Column('contributor_id', sa.BigInteger(), nullable=True)) + op.add_column('mentorship_program_site_structure', sa.Column('website_directory_label', sa.Text(), nullable=True)) + op.alter_column('mentorship_program_site_structure', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.drop_constraint('mentorship_program_site_structure_project_fkey', 'mentorship_program_site_structure', type_='foreignkey') + op.drop_constraint('mentorship_program_site_structure_product_fkey', 'mentorship_program_site_structure', type_='foreignkey') + op.drop_constraint('mentorship_program_site_structure_contributor_fkey', 'mentorship_program_site_structure', type_='foreignkey') + op.create_foreign_key(None, 'mentorship_program_site_structure', 'product', ['product_id'], ['id']) + op.drop_table_comment( + 'mentorship_program_site_structure', + existing_comment='a mapping for the milestones website structure', + schema=None + ) + op.drop_column('mentorship_program_site_structure', 'project') + op.drop_column('mentorship_program_site_structure', 'product') + op.drop_column('mentorship_program_site_structure', 'website directory_label') + op.drop_column('mentorship_program_site_structure', 'contributor') + op.alter_column('mentorship_program_website_comments', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('mentorship_program_website_comments', 'updated_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('mentorship_program_website_commits', 'date', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('mentorship_program_website_commits', 'files', + existing_type=postgresql.JSON(astext_type=sa.Text()), + type_=sa.Text(), + existing_nullable=True) + op.add_column('mentorship_program_website_has_updated', sa.Column('project_id', sa.BigInteger(), nullable=True)) + op.alter_column('mentorship_program_website_has_updated', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.alter_column('mentorship_program_website_has_updated', 'week1_update_date', + 
existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week2_update_date', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week3_update_date', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week4_update_date', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week5_update_date', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week6_update_date', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week7_update_date', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week8_update_date', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week9_update_date', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.drop_constraint('mentorship_program_website_has_updated_project_fkey', 'mentorship_program_website_has_updated', type_='foreignkey') + op.drop_column('mentorship_program_website_has_updated', 'project') + op.alter_column('point_system', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.alter_column('point_transactions', 'id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True) + op.alter_column('point_transactions', 'user_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=True) + op.alter_column('point_transactions', 'issue_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.alter_column('point_transactions', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + nullable=False, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('point_transactions', 'updated_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + nullable=False, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('point_transactions', 'angel_mentor_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=True) + op.drop_constraint('point_transactions_user_id_fkey', 'point_transactions', type_='foreignkey') + op.create_foreign_key(None, 'point_transactions', 'mentor_details', ['angel_mentor_id'], ['id']) + op.create_foreign_key(None, 'point_transactions', 'contributors_registration', ['user_id'], ['id']) + op.alter_column('points_mapping', 'id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True) + op.alter_column('points_mapping', 'role', + existing_type=sa.TEXT(), + type_=sa.String(length=50), + nullable=False) + op.alter_column('points_mapping', 
'complexity', + existing_type=sa.TEXT(), + type_=sa.String(length=50), + nullable=False) + op.alter_column('points_mapping', 'points', + existing_type=sa.INTEGER(), + nullable=False) + op.alter_column('points_mapping', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('points_mapping', 'updated_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('pr_history', 'id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('pr_history_id_seq'::regclass)")) + op.alter_column('pr_history', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('now()')) + op.alter_column('pr_history', 'raised_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False) + op.alter_column('pr_history', 'pr_id', + existing_type=sa.BIGINT(), + comment=None, + existing_comment='github id of the pr', + existing_nullable=False) + op.drop_table_comment( + 'pr_history', + existing_comment='Holds records of pr webhooks', + schema=None + ) + op.drop_column('pr_history', 'points') + op.alter_column('pr_staging', 'id', + existing_type=sa.UUID(), + type_=sa.String(length=36), + existing_nullable=False, + existing_server_default=sa.text('gen_random_uuid()')) + op.alter_column('pr_staging', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('now()')) + op.alter_column('pr_staging', 'raised_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False) + op.alter_column('pr_staging', 'pr_id', + existing_type=sa.BIGINT(), + comment=None, + existing_comment='github id of the pr', + existing_nullable=False) + op.drop_table_comment( + 'pr_staging', + existing_comment='This is a duplicate of connected_prs', + schema=None + ) + op.add_column('product', sa.Column('channel_id', sa.BigInteger(), nullable=True)) + op.alter_column('product', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.alter_column('product', 'description', + existing_type=sa.TEXT(), + comment=None, + existing_comment='URL to the product entry on C4GT wiki', + existing_nullable=True, + existing_server_default=sa.text("''::text")) + op.drop_constraint('product_channel_fkey', 'product', type_='foreignkey') + op.create_foreign_key(None, 'product', 'discord_channels', ['channel_id'], ['channel_id']) + op.drop_table_comment( + 'product', + existing_comment="A table containing all 'Products' in C4GT 2023", + schema=None + ) + op.drop_column('product', 'channel') + op.alter_column('ticket_comments', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('ticket_comments', 'updated_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True) + op.alter_column('unlisted_tickets', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + 
existing_server_default=sa.text('now()')) + op.alter_column('unlisted_tickets', 'ticket_points', + existing_type=sa.SMALLINT(), + comment=None, + existing_comment='How many points the ticket is worth', + existing_nullable=True, + existing_server_default=sa.text("'0'::smallint")) + op.alter_column('unlisted_tickets', 'index', + existing_type=sa.SMALLINT(), + server_default=None, + existing_nullable=False) + op.alter_column('unlisted_tickets', 'uuid', + existing_type=sa.UUID(), + type_=sa.String(length=36), + existing_nullable=False, + existing_server_default=sa.text('gen_random_uuid()')) + op.create_unique_constraint(None, 'unlisted_tickets', ['uuid', 'issue_id']) + op.add_column('user_activity', sa.Column('contributor_id', sa.BigInteger(), nullable=False)) + op.alter_column('user_activity', 'id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True) + op.alter_column('user_activity', 'issue_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.alter_column('user_activity', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('user_activity', 'updated_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('user_activity', 'mentor_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=True) + op.drop_constraint('user_activity_user_id_fkey', 'user_activity', type_='foreignkey') + op.drop_constraint('user_activity_mentor_id_fkey', 'user_activity', type_='foreignkey') + op.create_foreign_key(None, 'user_activity', 'contributors_registration', ['contributor_id'], ['id']) + op.create_foreign_key(None, 'user_activity', 'mentor_details', ['mentor_id'], ['id']) + op.drop_column('user_activity', 'user_id') + op.alter_column('user_badges', 'id', + existing_type=sa.INTEGER(), + type_=sa.UUID(), + existing_nullable=False) + op.alter_column('user_badges', 'user_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.alter_column('user_badges', 'badge_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.alter_column('user_badges', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('user_badges', 'updated_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('user_certificates', 'id', + existing_type=sa.INTEGER(), + type_=sa.UUID(), + existing_nullable=False) + op.alter_column('user_certificates', 'user_id', + existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False) + op.alter_column('user_certificates', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('user_certificates', 'updated_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('users', 'id', + 
existing_type=sa.INTEGER(), + type_=sa.BigInteger(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('users_id_seq'::regclass)")) + op.alter_column('users', 'name', + existing_type=sa.TEXT(), + nullable=True) + op.alter_column('users', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('users', 'updated_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.create_unique_constraint(None, 'users', ['discord']) + op.alter_column('vc_logs', 'id', + existing_type=sa.BIGINT(), + server_default=None, + existing_nullable=False, + autoincrement=True) + op.alter_column('vc_logs', 'created_at', + existing_type=postgresql.TIMESTAMP(timezone=True), + type_=db.models.DateTime(), + existing_nullable=False, + existing_server_default=sa.text('now()')) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('vc_logs', 'created_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False, + existing_server_default=sa.text('now()')) + op.alter_column('vc_logs', 'id', + existing_type=sa.BIGINT(), + server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), + existing_nullable=False, + autoincrement=True) + op.drop_constraint(None, 'users', type_='unique') + op.alter_column('users', 'updated_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('users', 'created_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('users', 'name', + existing_type=sa.TEXT(), + nullable=False) + op.alter_column('users', 'id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('users_id_seq'::regclass)")) + op.alter_column('user_certificates', 'updated_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('user_certificates', 'created_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('user_certificates', 'user_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False) + op.alter_column('user_certificates', 'id', + existing_type=sa.UUID(), + type_=sa.INTEGER(), + existing_nullable=False) + op.alter_column('user_badges', 'updated_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('user_badges', 'created_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('user_badges', 'badge_id', + 
existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False) + op.alter_column('user_badges', 'user_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False) + op.alter_column('user_badges', 'id', + existing_type=sa.UUID(), + type_=sa.INTEGER(), + existing_nullable=False) + op.add_column('user_activity', sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False)) + op.drop_constraint(None, 'user_activity', type_='foreignkey') + op.drop_constraint(None, 'user_activity', type_='foreignkey') + op.create_foreign_key('user_activity_mentor_id_fkey', 'user_activity', 'mentor_details', ['mentor_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') + op.create_foreign_key('user_activity_user_id_fkey', 'user_activity', 'users', ['user_id'], ['id']) + op.alter_column('user_activity', 'mentor_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=True) + op.alter_column('user_activity', 'updated_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('user_activity', 'created_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('user_activity', 'issue_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False) + op.alter_column('user_activity', 'id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True) + op.drop_column('user_activity', 'contributor_id') + op.drop_constraint(None, 'unlisted_tickets', type_='unique') + op.alter_column('unlisted_tickets', 'uuid', + existing_type=sa.String(length=36), + type_=sa.UUID(), + existing_nullable=False, + existing_server_default=sa.text('gen_random_uuid()')) + op.alter_column('unlisted_tickets', 'index', + existing_type=sa.SMALLINT(), + server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1), + existing_nullable=False) + op.alter_column('unlisted_tickets', 'ticket_points', + existing_type=sa.SMALLINT(), + comment='How many points the ticket is worth', + existing_nullable=True, + existing_server_default=sa.text("'0'::smallint")) + op.alter_column('unlisted_tickets', 'created_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text('now()')) + op.alter_column('ticket_comments', 'updated_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True) + op.alter_column('ticket_comments', 'created_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True) + op.add_column('product', sa.Column('channel', sa.BIGINT(), autoincrement=False, nullable=True)) + op.create_table_comment( + 'product', + "A table containing all 'Products' in C4GT 2023", + existing_comment=None, + schema=None + ) + op.drop_constraint(None, 'product', type_='foreignkey') + op.create_foreign_key('product_channel_fkey', 'product', 'discord_channels', ['channel'], ['channel_id']) + op.alter_column('product', 'description', + existing_type=sa.TEXT(), + comment='URL to the product entry on C4GT wiki', + existing_nullable=True, + existing_server_default=sa.text("''::text")) + op.alter_column('product', 'id', + 
existing_type=sa.BIGINT(), + server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), + existing_nullable=False, + autoincrement=True) + op.drop_column('product', 'channel_id') + op.create_table_comment( + 'pr_staging', + 'This is a duplicate of connected_prs', + existing_comment=None, + schema=None + ) + op.alter_column('pr_staging', 'pr_id', + existing_type=sa.BIGINT(), + comment='github id of the pr', + existing_nullable=False) + op.alter_column('pr_staging', 'raised_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False) + op.alter_column('pr_staging', 'created_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text('now()')) + op.alter_column('pr_staging', 'id', + existing_type=sa.String(length=36), + type_=sa.UUID(), + existing_nullable=False, + existing_server_default=sa.text('gen_random_uuid()')) + op.add_column('pr_history', sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False)) + op.create_table_comment( + 'pr_history', + 'Holds records of pr webhooks', + existing_comment=None, + schema=None + ) + op.alter_column('pr_history', 'pr_id', + existing_type=sa.BIGINT(), + comment='github id of the pr', + existing_nullable=False) + op.alter_column('pr_history', 'raised_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=False) + op.alter_column('pr_history', 'created_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text('now()')) + op.alter_column('pr_history', 'id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('pr_history_id_seq'::regclass)")) + op.alter_column('points_mapping', 'updated_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('points_mapping', 'created_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('points_mapping', 'points', + existing_type=sa.INTEGER(), + nullable=True) + op.alter_column('points_mapping', 'complexity', + existing_type=sa.String(length=50), + type_=sa.TEXT(), + nullable=True) + op.alter_column('points_mapping', 'role', + existing_type=sa.String(length=50), + type_=sa.TEXT(), + nullable=True) + op.alter_column('points_mapping', 'id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True) + op.drop_constraint(None, 'point_transactions', type_='foreignkey') + op.drop_constraint(None, 'point_transactions', type_='foreignkey') + op.create_foreign_key('point_transactions_user_id_fkey', 'point_transactions', 'contributors_registration', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') + op.alter_column('point_transactions', 'angel_mentor_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=True) + op.alter_column('point_transactions', 'updated_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + nullable=True, + 
existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('point_transactions', 'created_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + nullable=True, + existing_server_default=sa.text('CURRENT_TIMESTAMP')) + op.alter_column('point_transactions', 'issue_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False) + op.alter_column('point_transactions', 'user_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=True) + op.alter_column('point_transactions', 'id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True) + op.alter_column('point_system', 'id', + existing_type=sa.BIGINT(), + server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), + existing_nullable=False, + autoincrement=True) + op.add_column('mentorship_program_website_has_updated', sa.Column('project', sa.TEXT(), autoincrement=False, nullable=True)) + op.create_foreign_key('mentorship_program_website_has_updated_project_fkey', 'mentorship_program_website_has_updated', '__mentorship_program_projects', ['project'], ['name']) + op.alter_column('mentorship_program_website_has_updated', 'week9_update_date', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week8_update_date', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week7_update_date', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week6_update_date', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week5_update_date', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week4_update_date', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week3_update_date', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week2_update_date', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'week1_update_date', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True) + op.alter_column('mentorship_program_website_has_updated', 'id', + existing_type=sa.BIGINT(), + server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), + existing_nullable=False, + autoincrement=True) + op.drop_column('mentorship_program_website_has_updated', 'project_id') + op.alter_column('mentorship_program_website_commits', 'files', + existing_type=sa.Text(), + type_=postgresql.JSON(astext_type=sa.Text()), + existing_nullable=True) + op.alter_column('mentorship_program_website_commits', 'date', + 
existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True) + op.alter_column('mentorship_program_website_comments', 'updated_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True) + op.alter_column('mentorship_program_website_comments', 'created_at', + existing_type=db.models.DateTime(), + type_=postgresql.TIMESTAMP(timezone=True), + existing_nullable=True) + op.add_column('mentorship_program_site_structure', sa.Column('contributor', sa.TEXT(), autoincrement=False, nullable=True)) + op.add_column('mentorship_program_site_structure', sa.Column('website directory_label', sa.TEXT(), autoincrement=False, nullable=True)) + op.add_column('mentorship_program_site_structure', sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True)) + op.add_column('mentorship_program_site_structure', sa.Column('project', sa.TEXT(), autoincrement=False, nullable=True)) + op.create_table_comment( + 'mentorship_program_site_structure', + 'a mapping for the milestones website structure', + existing_comment=None, + schema=None + ) + op.drop_constraint(None, 'mentorship_program_site_structure', type_='foreignkey') + op.create_foreign_key('mentorship_program_site_structure_contributor_fkey', 'mentorship_program_site_structure', '__mentorship_program_selected_contributors', ['contributor'], ['name']) + op.create_foreign_key('mentorship_program_site_structure_product_fkey', 'mentorship_program_site_structure', 'product', ['product'], ['name']) + op.create_foreign_key('mentorship_program_site_structure_project_fkey', 'mentorship_program_site_structure', '__mentorship_program_projects', ['project'], ['name']) + op.alter_column('mentorship_program_site_structure', 'id', + existing_type=sa.BIGINT(), + server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), + existing_nullable=False, + autoincrement=True) + op.drop_column('mentorship_program_site_structure', 'website_directory_label') + op.drop_column('mentorship_program_site_structure', 'contributor_id') + op.drop_column('mentorship_program_site_structure', 'project_id') + op.drop_column('mentorship_program_site_structure', 'product_id') + op.alter_column('mentor_not_added', 'id', + existing_type=sa.BIGINT(), + server_default=sa.Identity(always=True, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), + existing_nullable=False, + autoincrement=True) + op.alter_column('mentor_details', 'id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('mentor_details_id_seq'::regclass)")) + op.drop_constraint(None, 'issues', type_='foreignkey') + op.create_foreign_key('issues_org_id_fkey', 'issues', 'community_orgs', ['org_id'], ['id'], onupdate='CASCADE', ondelete='SET NULL') + op.alter_column('issues', 'id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=False, + autoincrement=True, + existing_server_default=sa.text("nextval('issues_id_seq'::regclass)")) + op.create_unique_constraint('unique_issue_id_mentors', 'issue_mentors', ['issue_id']) + op.alter_column('issue_mentors', 'angel_mentor_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + existing_nullable=True) + op.alter_column('issue_mentors', 'issue_id', + existing_type=sa.BigInteger(), + type_=sa.INTEGER(), + nullable=False) + op.alter_column('issue_mentors', 'id', + 
+        existing_type=sa.BigInteger(),
+        type_=sa.INTEGER(),
+        existing_nullable=False,
+        autoincrement=True,
+        existing_server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)"))
+    op.drop_constraint(None, 'issue_contributors', type_='foreignkey')
+    op.drop_constraint(None, 'issue_contributors', type_='foreignkey')
+    op.create_foreign_key('issue_contributors_contributor_id_fkey', 'issue_contributors', 'contributors_registration', ['contributor_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
+    op.create_unique_constraint('unique_issue_id_contributors', 'issue_contributors', ['issue_id'])
+    op.alter_column('issue_contributors', 'updated_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True,
+        existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+    op.alter_column('issue_contributors', 'created_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True,
+        existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+    op.alter_column('issue_contributors', 'issue_id',
+        existing_type=sa.BigInteger(),
+        type_=sa.INTEGER(),
+        existing_nullable=False)
+    op.alter_column('issue_contributors', 'contributor_id',
+        existing_type=sa.BigInteger(),
+        type_=sa.INTEGER(),
+        nullable=False)
+    op.alter_column('issue_contributors', 'id',
+        existing_type=sa.BigInteger(),
+        type_=sa.INTEGER(),
+        existing_nullable=False,
+        autoincrement=True)
+    op.alter_column('github_organisations_to_organisations', 'created_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        comment='creation date of organization ticket',
+        existing_comment='Creation date of organization ticket',
+        existing_nullable=True)
+    op.alter_column('github_organisations_to_organisations', 'id',
+        existing_type=sa.BIGINT(),
+        server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+        existing_nullable=False,
+        autoincrement=True)
+    op.drop_constraint(None, 'github_installations', type_='foreignkey')
+    op.create_foreign_key('github_installations_organisation_fkey', 'github_installations', '__community_organisations', ['organisation'], ['name'], onupdate='CASCADE')
+    op.alter_column('github_installations', 'created_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True)
+    op.alter_column('github_installations', 'permissions_and_events',
+        existing_type=sa.Text(),
+        type_=postgresql.JSON(astext_type=sa.Text()),
+        existing_nullable=True)
+    op.alter_column('github_installations', 'github_ids',
+        existing_type=sa.Text(),
+        type_=postgresql.JSON(astext_type=sa.Text()),
+        comment="identifiers on the github database, prolly won't be used",
+        existing_comment="Identifiers on the github database, prolly won't be used",
+        existing_nullable=True)
+    op.alter_column('github_installations', 'id',
+        existing_type=sa.BIGINT(),
+        server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+        existing_nullable=False,
+        autoincrement=True)
+    op.create_table_comment(
+        'github_classroom_data',
+        'Table for save the details about github classroom assignment datas',
+        existing_comment='Table for saving the details about github classroom assignment data',
+        schema=None
+    )
+    op.alter_column('github_classroom_data', 'points_available',
+        existing_type=sa.Integer(),
+        type_=sa.VARCHAR(),
+        existing_nullable=True)
+    op.alter_column('github_classroom_data', 'points_awarded',
+        existing_type=sa.Integer(),
+        type_=sa.VARCHAR(),
+        existing_nullable=True)
+    op.alter_column('github_classroom_data', 'submission_timestamp',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=False)
+    op.alter_column('github_classroom_data', 'created_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=False,
+        existing_server_default=sa.text('now()'))
+    op.alter_column('github_classroom_data', 'id',
+        existing_type=sa.BIGINT(),
+        server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+        existing_nullable=False,
+        autoincrement=True)
+    op.create_foreign_key('dmp_week_updates_dmp_id_fkey', 'dmp_week_updates', 'dmp_issues', ['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
+    op.alter_column('dmp_week_updates', 'id',
+        existing_type=sa.BIGINT(),
+        server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+        existing_nullable=False,
+        autoincrement=True)
+    op.alter_column('dmp_tickets', 'index',
+        existing_type=sa.Integer(),
+        server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1),
+        type_=sa.SMALLINT(),
+        existing_nullable=False,
+        autoincrement=True)
+    op.alter_column('dmp_tickets', 'ticket_points',
+        existing_type=sa.Integer(),
+        type_=sa.SMALLINT(),
+        existing_comment='How many points the ticket is worth',
+        existing_nullable=True,
+        existing_server_default=sa.text("'0'::smallint"))
+    op.alter_column('dmp_tickets', 'created_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True,
+        existing_server_default=sa.text('now()'))
+    op.drop_constraint(None, 'dmp_pr_updates', type_='foreignkey')
+    op.create_foreign_key('dmp_pr_updates_dmp_id_fkey', 'dmp_pr_updates', 'dmp_issues', ['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
+    op.create_unique_constraint('dmp_pr_updates_pr_id_key', 'dmp_pr_updates', ['pr_id'])
+    op.alter_column('dmp_pr_updates', 'closed_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True)
+    op.alter_column('dmp_pr_updates', 'merged_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True)
+    op.alter_column('dmp_pr_updates', 'pr_updated_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True)
+    op.alter_column('dmp_pr_updates', 'created_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=False,
+        existing_server_default=sa.text('now()'))
+    op.create_unique_constraint('dmp_orgs_id_key', 'dmp_orgs', ['id'])
+    op.alter_column('dmp_orgs', 'created_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=False,
+        existing_server_default=sa.text('now()'))
+    op.alter_column('dmp_orgs', 'id',
+        existing_type=sa.BIGINT(),
+        server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+        existing_nullable=False,
+        autoincrement=True)
+    op.drop_column('dmp_orgs', 'version')
+    op.add_column('dmp_issues', sa.Column('repo_owner', sa.TEXT(), autoincrement=False, nullable=True))
+    op.drop_constraint(None, 'dmp_issues', type_='foreignkey')
+    op.create_foreign_key('dmp_issues_org_id_fkey', 'dmp_issues', 'dmp_orgs', ['org_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
+    op.create_unique_constraint('dmp_issues_dmp_id_key', 'dmp_issues', ['id'])
+    op.alter_column('dmp_issues', 'id',
+        existing_type=sa.BIGINT(),
+        server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+        existing_nullable=False,
+        autoincrement=True)
+    op.drop_constraint(None, 'dmp_issue_updates', type_='foreignkey')
+    op.create_foreign_key('dmp_issue_updates_dmp_id_fkey', 'dmp_issue_updates', 'dmp_issues', ['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
+    op.create_unique_constraint('dmp_issue_updates_comment_id_key', 'dmp_issue_updates', ['comment_id'])
+    op.alter_column('dmp_issue_updates', 'comment_updated_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True)
+    op.alter_column('dmp_issue_updates', 'created_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=False,
+        existing_server_default=sa.text('now()'))
+    op.add_column('discord_engagement', sa.Column('rockstarBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.add_column('discord_engagement', sa.Column('enthusiastBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.add_column('discord_engagement', sa.Column('risingStarBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.add_column('discord_engagement', sa.Column('converserBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.add_column('discord_engagement', sa.Column('apprenticeBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.alter_column('discord_engagement', 'created_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True,
+        existing_server_default=sa.text('now()'))
+    op.alter_column('discord_engagement', 'id',
+        existing_type=sa.BIGINT(),
+        server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+        existing_nullable=False,
+        autoincrement=True)
+    op.drop_column('discord_engagement', 'risingstarbadge')
+    op.drop_column('discord_engagement', 'enthusiastbadge')
+    op.drop_column('discord_engagement', 'rockstarbadge')
+    op.drop_column('discord_engagement', 'apprenticebadge')
+    op.drop_column('discord_engagement', 'converserbadge')
+    op.create_table_comment(
+        'contributors_registration',
+        'This is a duplicate of contributors_registration_old',
+        existing_comment=None,
+        schema=None
+    )
+    op.alter_column('contributors_registration', 'joined_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=False,
+        existing_server_default=sa.text('now()'))
+    op.alter_column('contributors_registration', 'id',
+        existing_type=sa.BIGINT(),
+        server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+        existing_nullable=False,
+        autoincrement=True)
+    op.add_column('contributors_discord', sa.Column('experience', sa.TEXT(), autoincrement=False, nullable=True))
+    op.add_column('contributors_discord', sa.Column('country', sa.TEXT(), autoincrement=False, nullable=True))
+    op.add_column('contributors_discord', sa.Column('city', sa.TEXT(), autoincrement=False, nullable=True))
+    op.alter_column('contributors_discord', 'joined_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=False,
+        existing_server_default=sa.text('now()'))
+    op.alter_column('contributors_discord', 'id',
+        existing_type=sa.BIGINT(),
+        server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+        existing_nullable=False,
+        autoincrement=True)
+    op.add_column('contributor_points', sa.Column('user_id', sa.BIGINT(), autoincrement=False, nullable=True))
+    op.drop_constraint(None, 'contributor_points', type_='foreignkey')
+    op.create_foreign_key('contributor_points_contributors_id_fkey', 'contributor_points', 'contributors_registration', ['user_id'], ['id'])
+    op.drop_column('contributor_points', 'contributors_id')
+    op.alter_column('contributor_names', 'id',
+        existing_type=sa.BIGINT(),
+        server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+        existing_nullable=False,
+        autoincrement=True)
+    op.alter_column('connected_prs', 'merged_at',
+        existing_type=sa.Text(),
+        type_=postgresql.TIMESTAMP(),
+        existing_nullable=True)
+    op.alter_column('connected_prs', 'raised_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=False)
+    op.alter_column('connected_prs', 'created_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True,
+        existing_server_default=sa.text('now()'))
+    op.alter_column('community_orgs', 'name',
+        existing_type=sa.TEXT(),
+        nullable=False)
+    op.alter_column('chapters', 'created_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True)
+    op.alter_column('chapters', 'discord_role_id',
+        existing_type=sa.BIGINT(),
+        nullable=False,
+        comment='db od of the corresponding member role in discord server',
+        existing_comment='db id of the corresponding member role in discord server')
+    op.alter_column('chapters', 'org_name',
+        existing_type=sa.TEXT(),
+        nullable=False)
+    op.alter_column('ccbp_tickets', 'closed_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_comment='date-time at which issue was closed',
+        existing_nullable=True)
+    op.alter_column('ccbp_tickets', 'index',
+        existing_type=sa.SMALLINT(),
+        server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1),
+        existing_nullable=False,
+        autoincrement=True)
+    op.alter_column('ccbp_tickets', 'issue_id',
+        existing_type=sa.BIGINT(),
+        nullable=False)
+    op.alter_column('ccbp_tickets', 'created_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True,
+        existing_server_default=sa.text('now()'))
+    op.alter_column('badges', 'updated_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True,
+        existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+    op.alter_column('badges', 'created_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True,
+        existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+    op.alter_column('badges', 'id',
+        existing_type=sa.UUID(),
+        type_=sa.INTEGER(),
+        existing_nullable=False)
+    op.alter_column('app_comments', 'issue_id',
+        existing_type=sa.BIGINT(),
+        nullable=False)
+    op.alter_column('app_comments', 'updated_at',
+        existing_type=db.models.DateTime(),
+        type_=postgresql.TIMESTAMP(timezone=True),
+        existing_nullable=True,
+        existing_server_default=sa.text('now()'))
+    op.alter_column('app_comments', 'id',
+        existing_type=sa.BigInteger(),
+        type_=sa.UUID(),
+        existing_nullable=False,
+        autoincrement=True,
+        existing_server_default=sa.text('gen_random_uuid()'))
+    op.create_table('unstructured discord data',
+    sa.Column('text', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('author', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('channel', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('channel_name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('uuid', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+    sa.Column('author_name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('author_roles', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('sent_at', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('uuid', name='unstructured discord data_duplicate_pkey'),
+    sa.UniqueConstraint('uuid', name='unstructured discord data_duplicate_uuid_key')
+    )
+    op.create_table('__community_program_product_wise_tickets',
+    sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
+    sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('gh_organisation', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('repository name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='community_program_tickets_duplicate_pkey')
+    )
+    op.create_table('__mentorship_program_selected_contributors',
+    sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('project_name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='mentorship_program_selected_contributors_pkey'),
+    sa.UniqueConstraint('name', name='mentorship_program_selected_contributors_name_key'),
+    comment='List of contributors selected for C4GT Mentorship Program 2023'
+    )
+    op.create_table('__community_organisations',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='organisations_pkey'),
+    sa.UniqueConstraint('name', name='organisations_name_key'),
+    postgresql_ignore_search_path=False
+    )
+    op.create_table('__community_program_tickets',
+    sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='community_program_tickets_pkey')
+    )
+    op.create_table('__pull_requests',
+    sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
+    sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('raised_by', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('raised_at', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('is_merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('merged_by', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('merged_at', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='github id of the pr'),
+    sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False),
+    sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='pull_requests_pkey1'),
+    sa.UniqueConstraint('html_url', name='pull_requests_html_url_key'),
+    sa.UniqueConstraint('pr_id', name='pull_requests_pr_id_key')
+    )
+    op.create_table('contributors_registration_old',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=False),
+    sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False),
+    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='contributors_registration_duplicate_pkey'),
+    sa.UniqueConstraint('discord_id', name='contributors_registration_duplicate_discord_id_key'),
+    sa.UniqueConstraint('github_id', name='contributors_registration_duplicate_github_id_key')
+    )
+    op.create_table('__dev_onboarding',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('repos', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='Onboarding_Dev_pkey'),
+    sa.UniqueConstraint('organisation', name='Onboarding_Dev_org_key')
+    )
+    op.create_table('__comments',
+    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='comments_pkey')
+    )
+    op.create_table('__mentorship_program_projects',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('repository', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('issue_page_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('isssue_api_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('repository_api_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['product'], ['product.name'], name='__mentorship_program_projects_product_fkey', ondelete='SET DEFAULT'),
+    sa.PrimaryKeyConstraint('id', name='projects_pkey'),
+    sa.UniqueConstraint('name', name='projects_name_key'),
+    comment='Selected projects under C4GT 2023'
+    )
+    op.create_table('__dashboard_config',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('dashboard', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('starting date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='dashboard _config_pkey')
+    )
+    op.create_table('__applicant',
+    sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+    sa.Column('sheet_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='applicant_pkey'),
+    sa.UniqueConstraint('discord_id', name='applicant_discord_id_key')
+    )
+    op.create_table('__contributors_discord',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False),
+    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column(' name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('chapter', sa.TEXT(), autoincrement=False, nullable=True, comment="the chapter they're associated with"),
+    sa.Column('gender', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='__contributors_pkey'),
+    sa.UniqueConstraint('discord_id', name='__contributors_discord_id_key')
+    )
+    op.create_table('__community_program_unique_user_data',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('ticket_name', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('linked_pr', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('linked_pr_author_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('linked_pr_author_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('is_registered', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('ticket_link', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('linked_pr_link', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('state', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='community_program_unique_user_data_pkey')
+    )
+    op.create_table('__mentorship_program_tickets',
+    sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+    sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='mentorship_program_tickets_pkey')
+    )
+    op.create_table('__mentorship_program_pull_request',
+    sa.Column('pr_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('pr_id', sa.INTEGER(), autoincrement=False, nullable=False),
+    sa.Column('pr_node_id', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('title', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('raised_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('body', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('merged_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('assignees', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('requested_reviewers', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('labels', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+    sa.Column('review_comments_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('comments_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('repository_id', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('repository_owner_name', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('repository_owner_id', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('repository_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.Column('number_of_commits', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('lines_of_code_added', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('lines_of_code_removed', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('number_of_files_changed', sa.INTEGER(), autoincrement=False, nullable=True),
+    sa.Column('merged_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('linked_ticket', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('pr_id', name='mentorship_program_pull_request_pkey')
+    )
+    op.create_table('__mentorship_program_ticket_comments',
+    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+    sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('id', name='mentorship_program_ticket_comments_pkey')
+    )
+    op.create_table('__mentors',
+    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True),
+    sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True),
+    sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['organisation'], ['__community_organisations.name'], name='__mentors_organisation_fkey'),
+    sa.PrimaryKeyConstraint('id', name='mentors_pkey')
+    )
+    op.create_table('__contributors_vc',
+    sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=False),
+    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
+    sa.Column('certificate_link', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.Column('stats', sa.TEXT(), autoincrement=False, nullable=True),
+    sa.PrimaryKeyConstraint('github_username', name='contributors_vc_pkey')
+    )
+    op.drop_table('user_points_mapping')
+    op.drop_table('unstructured_discord_data')
+    op.drop_table('role_master')
+    op.drop_table('leaderboard')
+    op.drop_table('github_profile_data')
+    # ### end Alembic commands ###
diff --git a/migrations/versions/947179aaa97c_version_added.py b/migrations/versions/947179aaa97c_version_added.py
deleted file mode 100644
index 9858072..0000000
--- a/migrations/versions/947179aaa97c_version_added.py
+++ /dev/null
@@ -1,1004 +0,0 @@
-"""Version added
-
-Revision ID: 947179aaa97c
-Revises: 5709cf5c2772
-Create Date: 2024-12-06 12:46:09.193639
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-# revision identifiers, used by Alembic.
-revision: str = '947179aaa97c'
-down_revision: Union[str, None] = '5709cf5c2772'
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table('ccbp_tickets')
-    op.drop_table('pr_history')
-    op.drop_table('mentorship_program_website_commits')
-    op.drop_table('__community_program_tickets')
-    op.drop_table('chapters')
-    op.drop_table('__dev_onboarding')
-    op.drop_table('discord_engagement')
-    op.drop_table('__mentors')
-    op.drop_table('connected_prs')
-    op.drop_table('github_classroom_data')
-    op.drop_table('pr_staging')
-    op.drop_table('contributors_registration')
-    op.drop_table('dmp_orgs')
-    op.drop_table('user_activity')
-    op.drop_table('contributor_names')
-    op.drop_table('__mentorship_program_ticket_comments')
-    op.drop_table('vc_logs')
-    op.drop_table('issue_contributors')
-    op.drop_table('__contributors_discord')
-    op.drop_table('app_comments')
-    op.drop_table('user_certificates')
-    op.drop_table('contributors_registration_old')
-    op.drop_table('product')
-    op.drop_table('ticket_comments')
-    op.drop_table('github_installations')
-    op.drop_table('mentorship_website_contributor_project')
-    op.drop_table('points_mapping')
-    op.drop_table('community_orgs')
-    op.drop_table('__mentorship_program_selected_contributors')
-    op.drop_table('github_organisations_to_organisations')
-    op.drop_table('badges')
-    op.drop_table('dmp_tickets')
-    op.drop_table('__dashboard_config')
-    op.drop_table('user_badges')
-    op.drop_table('point_system')
-    op.drop_table('__community_program_unique_user_data')
-    op.drop_table('__community_program_product_wise_tickets')
-    op.drop_table('dmp_pr_updates')
-    op.drop_table('mentor_details')
-    op.drop_table('mentorship_program_website_pull_request')
-    op.drop_table('mentorship_program_site_structure')
-    op.drop_table('contributor_points')
-    op.drop_table('dmp_week_updates')
-    op.drop_table('dmp_issues')
-    op.drop_table('dmp_issue_updates')
-    op.drop_table('issue_mentors')
-    op.drop_table('unlisted_tickets')
-    op.drop_table('mentorship_program_website_has_updated')
-    op.drop_table('__contributors_vc')
-    op.drop_table('unstructured discord data')
-    op.drop_table('__mentorship_program_pull_request')
-    op.drop_table('__applicant')
-    op.drop_table('users')
-    op.drop_table('__mentorship_program_tickets')
-    op.drop_table('__comments')
-    op.drop_table('__pull_requests')
-    op.drop_table('__mentorship_program_projects')
-    op.drop_table('discord_channels')
-    op.drop_table('__community_organisations')
-    op.drop_table('point_transactions')
-    op.drop_table('issues')
-    op.drop_table('contributors_discord')
-    op.drop_table('mentorship_program_website_comments')
-    op.drop_table('mentor_not_added')
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('mentor_not_added',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=True, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('mentor_github_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('issue_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='mentor_not_added_pkey')
-    )
-    op.create_table('mentorship_program_website_comments',
-    sa.Column('comment_id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('commented_by_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('body', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('comment_id', name='mentorship_program_website_comments_pkey')
-    )
-    op.create_table('contributors_discord',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True),
-    sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True),
-    sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False),
-    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('chapter', sa.TEXT(), autoincrement=False, nullable=True, comment="the chapter they're associated with"),
-    sa.Column('gender', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('is_active', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=False),
-    sa.Column('country', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('city', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('experience', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='contributors_discord_duplicate_pkey'),
-    sa.UniqueConstraint('discord_id', name='contributors_discord_duplicate_discord_id_key')
-    )
-    op.create_table('issues',
-    sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('issues_id_seq'::regclass)"), autoincrement=True, nullable=False),
-    sa.Column('link', sa.TEXT(), autoincrement=False, nullable=False),
-    sa.Column('labels', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
-    sa.Column('complexity', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('skills', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('technology', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('title', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('domain', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('project_type', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('issue_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('org_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.ForeignKeyConstraint(['org_id'], ['community_orgs.id'], name='issues_org_id_fkey', onupdate='CASCADE', ondelete='SET NULL'),
-    sa.PrimaryKeyConstraint('id', name='issues_pkey'),
-    sa.UniqueConstraint('issue_id', name='unique_issue_id'),
-    postgresql_ignore_search_path=False
-    )
-    op.create_table('point_transactions',
-    sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
-    sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('issue_id', sa.INTEGER(), autoincrement=False, nullable=False),
-    sa.Column('point', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('type', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
-    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
-    sa.Column('angel_mentor_id', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.ForeignKeyConstraint(['issue_id'], ['issues.id'], name='point_transactions_issue_id_fkey'),
-    sa.ForeignKeyConstraint(['user_id'], ['contributors_registration.id'], name='point_transactions_user_id_fkey', onupdate='CASCADE', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('id', name='point_transactions_pkey')
-    )
-    op.create_table('__community_organisations',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='organisations_pkey'),
-    sa.UniqueConstraint('name', name='organisations_name_key'),
-    postgresql_ignore_search_path=False
-    )
-    op.create_table('discord_channels',
-    sa.Column('channel_id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('channel_name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('webhook', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('should_notify', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
-    sa.PrimaryKeyConstraint('channel_id', name='discord_channels_pkey'),
-    postgresql_ignore_search_path=False
-    )
-    op.create_table('__mentorship_program_projects',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('repository', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('issue_page_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('isssue_api_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('repository_api_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.ForeignKeyConstraint(['product'], ['product.name'], name='__mentorship_program_projects_product_fkey', ondelete='SET DEFAULT'),
-    sa.PrimaryKeyConstraint('id', name='projects_pkey'),
-    sa.UniqueConstraint('name', name='projects_name_key'),
-    comment='Selected projects under C4GT 2023',
-    postgresql_ignore_search_path=False
-    )
-    op.create_table('__pull_requests',
-    sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
-    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
-    sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('raised_by', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('raised_at', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('is_merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('merged_by', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('merged_at', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='github id of the pr'),
-    sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False),
-    sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='pull_requests_pkey1'),
-    sa.UniqueConstraint('html_url', name='pull_requests_html_url_key'),
-    sa.UniqueConstraint('pr_id', name='pull_requests_pr_id_key')
-    )
-    op.create_table('__comments',
-    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='comments_pkey')
-    )
-    op.create_table('__mentorship_program_tickets',
-    sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='mentorship_program_tickets_pkey')
-    )
-    op.create_table('users',
-    sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('users_id_seq'::regclass)"), autoincrement=True, nullable=False),
-    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=False),
-    sa.Column('discord', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('github', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('points', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('level', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
-    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='users_pkey'),
-    postgresql_ignore_search_path=False
-    )
-    op.create_table('__applicant',
-    sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
-    sa.Column('sheet_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='applicant_pkey'),
-    sa.UniqueConstraint('discord_id', name='applicant_discord_id_key')
-    )
-    op.create_table('__mentorship_program_pull_request',
-    sa.Column('pr_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('pr_id', sa.INTEGER(), autoincrement=False, nullable=False),
-    sa.Column('pr_node_id', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('title', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('raised_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('body', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('merged_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('assignees', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
-    sa.Column('requested_reviewers', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
-    sa.Column('labels', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
-    sa.Column('review_comments_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('comments_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('repository_id', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('repository_owner_name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('repository_owner_id', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('repository_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('number_of_commits', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('lines_of_code_added', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('lines_of_code_removed', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('number_of_files_changed', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('merged_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('linked_ticket', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('pr_id', name='mentorship_program_pull_request_pkey')
-    )
-    op.create_table('unstructured discord data',
-    sa.Column('text', sa.VARCHAR(), autoincrement=False, nullable=True),
-    sa.Column('author', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('channel', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('channel_name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('uuid', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
-    sa.Column('author_name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('author_roles', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
-    sa.Column('sent_at', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('uuid', name='unstructured discord data_duplicate_pkey'),
-    sa.UniqueConstraint('uuid', name='unstructured discord data_duplicate_uuid_key')
-    )
-    op.create_table('__contributors_vc',
-    sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=False),
-    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('certificate_link', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('stats', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('github_username', name='contributors_vc_pkey')
-    )
-    op.create_table('mentorship_program_website_has_updated',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('project', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('week1_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('week2_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('week3_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('week4_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('week5_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('week6_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('week7_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('week8_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('week9_update_date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('week1_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('week2_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('week3_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('week4_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('week5_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('week6_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('week7_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('week8_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('week9_is_default_text', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('project_folder', sa.TEXT(), autoincrement=False, nullable=False),
-    sa.Column('all_links', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
-    sa.ForeignKeyConstraint(['project'], ['__mentorship_program_projects.name'], name='mentorship_program_website_has_updated_project_fkey'),
-    sa.PrimaryKeyConstraint('id', name='mentorship_program_webite_has_updated_pkey'),
-    sa.UniqueConstraint('project_folder', name='mentorship_program_website_has_updated_project_folder_key')
-    )
-    op.create_table('unlisted_tickets',
-    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
-    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('complexity', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('project_category', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
-    sa.Column('project_sub_category', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
-    sa.Column('reqd_skills', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('issue_id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('api_endpoint_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('ticket_points', sa.SMALLINT(), server_default=sa.text("'0'::smallint"), autoincrement=False, nullable=True, comment='How many points the ticket is worth'),
-    sa.Column('index', sa.SMALLINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('mentors', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
-    sa.Column('uuid', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
-    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('organization', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('uuid', 'issue_id', name='unlisted_tickets_pkey'),
-    sa.UniqueConstraint('api_endpoint_url', name='unlisted_tickets_api_endpoint_url_key'),
-    sa.UniqueConstraint('index', name='unlisted_tickets_index_key'),
-    sa.UniqueConstraint('issue_id', name='unlisted_tickets_"issue_id"_key'),
-    sa.UniqueConstraint('url', name='unlisted_tickets_url_key')
-    )
-    op.create_table('issue_mentors',
-    sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)"), autoincrement=True, nullable=False),
-    sa.Column('issue_id', sa.INTEGER(), autoincrement=False, nullable=False),
-    sa.Column('angel_mentor_id', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('org_mentor_id', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
-    sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
-    sa.ForeignKeyConstraint(['angel_mentor_id'], ['contributors_registration.id'], name='issue_mentors_mentor_fkey'),
-    sa.ForeignKeyConstraint(['issue_id'], ['issues.id'], name='issue_mentors_issue_fkey'),
-    sa.PrimaryKeyConstraint('id', name='issue_mentors_pkey'),
-    sa.UniqueConstraint('issue_id', name='unique_issue_id_mentors')
-    )
-    op.create_table('dmp_issue_updates',
-    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False),
-    sa.Column('body_text', sa.TEXT(), server_default=sa.text("''::text"), autoincrement=False, nullable=True),
-    sa.Column('comment_link', sa.TEXT(), server_default=sa.text("''::text"), autoincrement=False, nullable=True),
-    sa.Column('comment_id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('comment_api', sa.VARCHAR(), server_default=sa.text("''::character varying"), autoincrement=False, nullable=True),
-    sa.Column('comment_updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('dmp_id', sa.BIGINT(), server_default=sa.text("'1'::bigint"), autoincrement=False, nullable=False),
-    sa.Column('created_by', sa.TEXT(), server_default=sa.text("'NA'::text"), autoincrement=False, nullable=False),
-    sa.ForeignKeyConstraint(['dmp_id'], ['dmp_issues.id'], name='dmp_issue_updates_dmp_id_fkey', onupdate='CASCADE', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('comment_id', name='dmp_issue_updates_pkey'),
-    sa.UniqueConstraint('comment_id', name='dmp_issue_updates_comment_id_key'),
-    comment='Having records of dmp with issue details'
-    )
-    op.create_table('dmp_issues',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('issue_url', sa.VARCHAR(), server_default=sa.text("'NA'::character varying"), autoincrement=False, nullable=False),
-    sa.Column('issue_number', sa.BIGINT(), server_default=sa.text("'1'::bigint"), autoincrement=False, nullable=False),
-    sa.Column('mentor_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('contributor_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('title', sa.TEXT(), server_default=sa.text("'NA'::text"), autoincrement=False, nullable=False),
-    sa.Column('org_id', sa.BIGINT(), server_default=sa.text("'1'::bigint"), autoincrement=False, nullable=False),
-    sa.Column('description', sa.TEXT(), server_default=sa.text("'NA'::text"), autoincrement=False, nullable=False),
-    sa.Column('repo', sa.TEXT(), server_default=sa.text("'NA'::text"), autoincrement=False, nullable=False),
-    sa.Column('repo_owner', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.ForeignKeyConstraint(['org_id'], ['dmp_orgs.id'], name='dmp_issues_org_id_fkey', onupdate='CASCADE', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('id', name='dmp_issues_pkey'),
-    sa.UniqueConstraint('id', name='dmp_issues_dmp_id_key'),
-    postgresql_ignore_search_path=False
-    )
-    op.create_table('dmp_week_updates',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=False),
-    sa.Column('week', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('total_task', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('completed_task', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('progress', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
-    sa.Column('task_data', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('dmp_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.ForeignKeyConstraint(['dmp_id'], ['dmp_issues.id'], name='dmp_week_updates_dmp_id_fkey', onupdate='CASCADE', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('id', name='dmp_week_updates_pkey')
-    )
-    op.create_table('contributor_points',
-    sa.Column('id', sa.BIGINT(), autoincrement=True, nullable=False),
-    sa.Column('user_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('total_points', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=False),
-    sa.ForeignKeyConstraint(['user_id'], ['contributors_registration.id'], name='contributor_points_contributors_id_fkey'),
-    sa.PrimaryKeyConstraint('id', name='contributor_points_pkey')
-    )
-    op.create_table('mentorship_program_site_structure',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('project', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('contributor', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('website directory_label', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('directory_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.ForeignKeyConstraint(['contributor'], ['__mentorship_program_selected_contributors.name'], name='mentorship_program_site_structure_contributor_fkey'),
-    sa.ForeignKeyConstraint(['product'], ['product.name'], name='mentorship_program_site_structure_product_fkey'),
-    sa.ForeignKeyConstraint(['project'], ['__mentorship_program_projects.name'], name='mentorship_program_site_structure_project_fkey'),
-    sa.PrimaryKeyConstraint('id', name='mentorship_program_site_structure_pkey'),
-    comment='a mapping for the milestones website structure'
-    )
-    op.create_table('mentorship_program_website_pull_request',
-    sa.Column('pr_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('pr_node_id', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('title', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('raised_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('body', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('created_at',
postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('merged_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('assignees', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('requested_reviewers', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('labels', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('review_comments_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('comments_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repository_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('repository_owner_name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repository_owner_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('repository_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('number_of_commits', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('lines_of_code_added', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('lines_of_code_removed', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('number_of_files_changed', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('merged_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('linked_ticket', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('project_name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('project_folder_label', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('week_number', sa.SMALLINT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('pr_id', name='mentorship_program_website_pull_request_pkey'), - sa.UniqueConstraint('pr_node_id', name='mentorship_program_website_pull_request_pr_node_id_key') - ) - op.create_table('mentor_details', - sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('mentor_details_id_seq'::regclass)"), autoincrement=True, nullable=False), - sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('email', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('discord_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('discord_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('github_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='mentor_details_pkey'), - postgresql_ignore_search_path=False - ) - op.create_table('dmp_pr_updates', - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False), - sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('status', sa.VARCHAR(), server_default=sa.text("''::character 
varying"), autoincrement=False, nullable=False), - sa.Column('title', sa.TEXT(), server_default=sa.text("''::text"), autoincrement=False, nullable=False), - sa.Column('pr_updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('merged_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('closed_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('dmp_id', sa.BIGINT(), server_default=sa.text("'1'::bigint"), autoincrement=False, nullable=False), - sa.Column('link', sa.TEXT(), server_default=sa.text("'NA'::text"), autoincrement=False, nullable=False), - sa.ForeignKeyConstraint(['dmp_id'], ['dmp_issues.id'], name='dmp_pr_updates_dmp_id_fkey', onupdate='CASCADE', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('pr_id', name='dmp_pr_updates_pkey'), - sa.UniqueConstraint('pr_id', name='dmp_pr_updates_pr_id_key'), - comment='Having PR related records' - ) - op.create_table('__community_program_product_wise_tickets', - sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True), - sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('gh_organisation', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repository name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='community_program_tickets_duplicate_pkey') - ) - op.create_table('__community_program_unique_user_data', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('ticket_name', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('linked_pr', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('linked_pr_author_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('linked_pr_author_username', sa.TEXT(), 
autoincrement=False, nullable=True), - sa.Column('is_registered', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('ticket_link', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('linked_pr_link', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('state', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='community_program_unique_user_data_pkey') - ) - op.create_table('point_system', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('complexity', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('points', sa.SMALLINT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='point_system_pkey') - ) - op.create_table('user_badges', - sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column('badge_id', sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['badge_id'], ['badges.id'], name='user_badges_badge_id_fkey'), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='user_badges_user_id_fkey'), - sa.PrimaryKeyConstraint('id', name='user_badges_pkey') - ) - op.create_table('__dashboard_config', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('dashboard', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('starting date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='dashboard _config_pkey') - ) - op.create_table('dmp_tickets', - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('complexity', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('project_category', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('project_sub_category', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('reqd_skills', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('issue_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('api_endpoint_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('ticket_points', sa.SMALLINT(), server_default=sa.text("'0'::smallint"), autoincrement=False, nullable=True, comment='How many points the ticket is worth'), - sa.Column('index', sa.SMALLINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('mentors', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('uuid', sa.UUID(), 
server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True, comment='has community label'), - sa.Column('organization', sa.TEXT(), server_default=sa.text("''::text"), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('uuid', name='dmp_tickets_pkey'), - sa.UniqueConstraint('api_endpoint_url', name='dmp_tickets_api_endpoint_url_key'), - sa.UniqueConstraint('index', name='dmp_tickets_index_key'), - sa.UniqueConstraint('issue_id', name='dmp_tickets_"issue_id"_key'), - sa.UniqueConstraint('url', name='dmp_tickets_url_key') - ) - op.create_table('badges', - sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('image', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('text', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='badges_pkey') - ) - op.create_table('github_organisations_to_organisations', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('github_organisation', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True, comment='creation date of organization ticket'), - sa.PrimaryKeyConstraint('id', name='github_organisations_to_organisations_pkey') - ) - op.create_table('__mentorship_program_selected_contributors', - sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('project_name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='mentorship_program_selected_contributors_pkey'), - sa.UniqueConstraint('name', name='mentorship_program_selected_contributors_name_key'), - comment='List of contributors selected for C4GT Mentorship Program 2023' - ) - op.create_table('community_orgs', - sa.Column('id', sa.BIGINT(), server_default=sa.text("nextval('community_orgs_id_seq'::regclass)"), autoincrement=True, nullable=False), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=False), - sa.PrimaryKeyConstraint('id', name='community_orgs_pkey'), - postgresql_ignore_search_path=False - ) - op.create_table('points_mapping', - sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('role', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('complexity', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('points', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), 
autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='points_mapping_pkey') - ) - op.create_table('mentorship_website_contributor_project', - sa.Column('project_folder', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('contributor', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('project_folder', name='mentorship_website_contributor_project_pkey') - ) - op.create_table('github_installations', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('github_organisation', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('installation_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('target_type', sa.TEXT(), autoincrement=False, nullable=True, comment='Type of github entity that installed the app, usually "Organisation"'), - sa.Column('github_ids', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True, comment="identifiers on the github database, prolly won't be used"), - sa.Column('permissions_and_events', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['organisation'], ['__community_organisations.name'], name='github_installations_organisation_fkey', onupdate='CASCADE'), - sa.PrimaryKeyConstraint('id', name='__github_installations_pkey'), - sa.UniqueConstraint('github_organisation', name='__github_installations_organisation_key'), - sa.UniqueConstraint('installation_id', name='__github_installations_installation_id_key') - ) - op.create_table('ticket_comments', - sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('issue_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('comment_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='ticket_comments_pkey') - ) - op.create_table('product', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('name', sa.TEXT(), server_default=sa.text("''::text"), autoincrement=False, nullable=False), - sa.Column('description', sa.TEXT(), server_default=sa.text("''::text"), 
autoincrement=False, nullable=True, comment='URL to the product entry on C4GT wiki'), - sa.Column('wiki_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('channel', sa.BIGINT(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['channel'], ['discord_channels.channel_id'], name='product_channel_fkey'), - sa.PrimaryKeyConstraint('id', name='products_pkey'), - sa.UniqueConstraint('name', name='products_product_name_key'), - comment="A table containing all 'Products' in C4GT 2023" - ) - op.create_table('contributors_registration_old', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False), - sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='contributors_registration_duplicate_pkey'), - sa.UniqueConstraint('discord_id', name='contributors_registration_duplicate_discord_id_key'), - sa.UniqueConstraint('github_id', name='contributors_registration_duplicate_github_id_key') - ) - op.create_table('user_certificates', - sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column('certificate_link', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='user_certificates_user_id_fkey'), - sa.PrimaryKeyConstraint('id', name='user_certificates_pkey') - ) - op.create_table('app_comments', - sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True), - sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('comment_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('issue_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.PrimaryKeyConstraint('id', name='app_comments_pkey'), - sa.UniqueConstraint('issue_id', name='app_comments_issue_id_key') - ) - op.create_table('__contributors_discord', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), 
autoincrement=False, nullable=False), - sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column(' name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('chapter', sa.TEXT(), autoincrement=False, nullable=True, comment="the chapter they're associated with"), - sa.Column('gender', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='__contributors_pkey'), - sa.UniqueConstraint('discord_id', name='__contributors_discord_id_key') - ) - op.create_table('issue_contributors', - sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('issue_id', sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.Column('contributor_id', sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column('role', sa.BIGINT(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['contributor_id'], ['contributors_registration.id'], name='issue_contributors_contributor_id_fkey', onupdate='CASCADE', ondelete='CASCADE'), - sa.ForeignKeyConstraint(['issue_id'], ['issues.id'], name='issue_contributors_issue_id_fkey'), - sa.PrimaryKeyConstraint('id', name='issue_contributors_pkey'), - sa.UniqueConstraint('issue_id', name='unique_issue_id_contributors') - ) - op.create_table('vc_logs', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('discord_name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('option', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='vc_logs_pkey') - ) - op.create_table('__mentorship_program_ticket_comments', - sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='mentorship_program_ticket_comments_pkey') - ) - op.create_table('contributor_names', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('country', 
sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='contributor_names_pkey') - ) - op.create_table('user_activity', - sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column('issue_id', sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column('activity', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True), - sa.Column('mentor_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['issue_id'], ['issues.id'], name='user_activity_issue_id_fkey'), - sa.ForeignKeyConstraint(['mentor_id'], ['mentor_details.id'], name='user_activity_mentor_id_fkey', onupdate='CASCADE', ondelete='CASCADE'), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='user_activity_user_id_fkey'), - sa.PrimaryKeyConstraint('id', name='user_activity_pkey') - ) - op.create_table('dmp_orgs', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('description', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('link', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('repo_owner', sa.TEXT(), server_default=sa.text("'NA'::text"), autoincrement=False, nullable=False), - sa.PrimaryKeyConstraint('id', name='dmp_orgs_pkey'), - sa.UniqueConstraint('id', name='dmp_orgs_id_key') - ) - op.create_table('contributors_registration', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False), - sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='contributors_registration_old_duplicate_pkey'), - sa.UniqueConstraint('discord_id', name='contributors_registration_old_duplicate_discord_id_key'), - sa.UniqueConstraint('github_id', name='contributors_registration_old_duplicate_github_id_key'), - comment='This is a duplicate of contributors_registration_old' - ) - op.create_table('pr_staging', - sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True), - sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('html_url', sa.TEXT(), autoincrement=False, 
nullable=True), - sa.Column('raised_by', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('raised_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False), - sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('is_merged', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('merged_by', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('merged_at', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='github id of the pr'), - sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False), - sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('ticket_complexity', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='pr_staging_pkey'), - sa.UniqueConstraint('html_url', name='pr_staging_html_url_key'), - comment='This is a duplicate of connected_prs' - ) - op.create_table('github_classroom_data', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False), - sa.Column('assignment_name', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('assignment_url', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('assignment_id', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('starter_code_url', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('roster_identifier', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('student_repository_name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('student_repository_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('submission_timestamp', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False), - sa.Column('points_awarded', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('points_available', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('c4gt_points', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('discord_id', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='github_classroom_data_pkey'), - comment='Table for save the details about github classroom assignment datas' - ) - op.create_table('connected_prs', - sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True), - sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('raised_by', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('raised_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False), - sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, 
nullable=False), - sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('is_merged', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('merged_by', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('merged_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='github id of the pr'), - sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False), - sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('ticket_complexity', sa.TEXT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='connected_prs_pkey'), - sa.UniqueConstraint('html_url', name='connected_prs_html_url_key') - ) - op.create_table('__mentors', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['organisation'], ['__community_organisations.name'], name='__mentors_organisation_fkey'), - sa.PrimaryKeyConstraint('id', name='mentors_pkey') - ) - op.create_table('discord_engagement', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True), - sa.Column('contributor', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('has_introduced', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True), - sa.Column('total_message_count', sa.BIGINT(), server_default=sa.text("'0'::bigint"), autoincrement=False, nullable=True), - sa.Column('total_reaction_count', sa.BIGINT(), server_default=sa.text("'0'::bigint"), autoincrement=False, nullable=True), - sa.Column('converserBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True), - sa.Column('apprenticeBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True), - sa.Column('rockstarBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True), - sa.Column('enthusiastBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True), - sa.Column('risingStarBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='discord_engagement_pkey'), - sa.UniqueConstraint('contributor', name='discord_engagement_contributor_key'), - comment='engagement metrics for contributors' - ) - op.create_table('__dev_onboarding', - sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, 
cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('repos', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='Onboarding_Dev_pkey'), - sa.UniqueConstraint('organisation', name='Onboarding_Dev_org_key') - ) - op.create_table('chapters', - sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('type', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('org_name', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('primary_organisation', sa.TEXT(), autoincrement=False, nullable=True, comment='the organisation that the chapter is mapped to'), - sa.Column('sessions', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('discord_role_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='db od of the corresponding member role in discord server'), - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.CheckConstraint("type = ANY (ARRAY['CORPORATE'::text, 'COLLEGE'::text])", name='chapters_type_check'), - sa.PrimaryKeyConstraint('id', name='chapters_pkey'), - sa.UniqueConstraint('discord_role_id', name='chapters_discord_role_id_key'), - sa.UniqueConstraint('org_name', name='chapters_org_name_key') - ) - op.create_table('__community_program_tickets', - sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='community_program_tickets_pkey') - ) - op.create_table('mentorship_program_website_commits', - sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('comment_count', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('date', postgresql.TIMESTAMP(timezone=True), 
autoincrement=False, nullable=True), - sa.Column('author_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('author_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('author_email', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('committer_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('committer_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('committer_email', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('additions', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('deletions', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('files', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True), - sa.Column('project_folder_name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('node_id', name='mentorship_program_website_commits_pkey') - ) - op.create_table('pr_history', - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True), - sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('raised_by', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('raised_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False), - sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('is_merged', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('merged_by', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('merged_at', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='github id of the pr'), - sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False), - sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=False), - sa.Column('ticket_complexity', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('pr_history_id_seq'::regclass)"), autoincrement=True, nullable=False), - sa.PrimaryKeyConstraint('id', name='pr_history_pkey'), - sa.UniqueConstraint('html_url', name='pr_history_html_url_key'), - comment='Holds records of pr webhooks' - ) - op.create_table('ccbp_tickets', - sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True), - sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('complexity', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('project_category', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('project_sub_category', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('reqd_skills', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('issue_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('api_endpoint_url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('ticket_points', 
sa.SMALLINT(), server_default=sa.text("'0'::smallint"), autoincrement=False, nullable=True, comment='How many points the ticket is worth'), - sa.Column('index', sa.SMALLINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1), autoincrement=True, nullable=False), - sa.Column('mentors', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('uuid', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False), - sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True, comment='has community label'), - sa.Column('organization', sa.TEXT(), server_default=sa.text("''::text"), autoincrement=False, nullable=True), - sa.Column('closed_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True, comment='date-time at which issue was closed'), - sa.Column('assignees', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('issue_author', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True), - sa.Column('is_assigned', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False), - sa.PrimaryKeyConstraint('uuid', name='ccbp_tickets_pkey'), - sa.UniqueConstraint('api_endpoint_url', name='ccbp_tickets_api_endpoint_url_key'), - sa.UniqueConstraint('index', name='ccbp_tickets_index_key'), - sa.UniqueConstraint('issue_id', name='ccbp_tickets_"issue_id"_key'), - sa.UniqueConstraint('url', name='ccbp_tickets_url_key'), - comment='A table to store details of CCBP Tickets from various projects' - ) - # ### end Alembic commands ### diff --git a/migrations/versions/__pycache__/5709cf5c2772_baseline_migration.cpython-310.pyc b/migrations/versions/__pycache__/5709cf5c2772_baseline_migration.cpython-310.pyc deleted file mode 100644 index ad46824b23d9cb43023b6e321d68bbc5fcf20809..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 915 zcmZ`%OK;Oa5MDoGJC5@xFR5_xHHTEGl{O`+5RgiUOAjr*Se90fcbXdf>aJ51&ioR@ zr9Xy0uvbo;xp89Ft=fQKt^Ibsnepz-w_A3*F2S+>db0ec6Y^b=&0!*WiBo(*I3tXN zglSCAv|JDM+z1U#aU(PHR@iE6GqXY)eXY#N+hH4hR_HH^4}iK{JrV;d2+s&1k-7769F;v-8qN22LA|Q<_F_oGTIGB<9&ZOm`x_L zV7jwAJ_z;?T=xe`L58|?PT_I|MFO&QR^aACP3mQJjEh^9qF!4L{aazV3r0hy34g9-{JXjw>$L%rffqK@4h8j?BApX!nrIVIQH@ipOHWcTn( z%xBS=04}0eVkM#@5FhHYiYi{dholzKeDda9BrakOY?PM_4yYHG0->5t!bF diff --git a/migrations/versions/__pycache__/8d1e6a7e959a_initial_migration.cpython-310.pyc b/migrations/versions/__pycache__/8d1e6a7e959a_initial_migration.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eb3b18f5c4b2682c4d8d051591979289b197510b GIT binary patch literal 32127 zcmeHwd3;;NwZ9}wvMeu=9Y?Vf$59}}2_+#L5K>ABNr2FhKw?wSORLJB8(WSniF@TF zc1vj=ywc5mbfF7vsndlvp$lDKOIPUfpwPW(>Fq{Y+5!!wY;6sc_dPRn@13iAWhwmr zc%RSj^Wu+B=AJn-bGA7%cbVg2Yim4!e_h`m9DeG`VBmhbQ2&K-aSncs4+9JYSRfr> zK^Dpf$3p4QSVOuYNY@Sd@K_`r;pK2Xnr;Lxl8=owrJI0@rsG<(*1{Uk3b1gxRcp|i zS$J=#I{;ja z3&6(I+SO~Q!g1S##Rzb@BK^&_rNG$c*jQt{B$}Lz)>G zD4_EdK{He=?ZAKWQqd?6>zZ*zKHZ`YXU25NiP_4E2MQVIa!}6}hDI{^?4Xu6%sA6Z zT7emvV!@2+T6tVA4AHRazxlY>j9cTX7Ffk9N$%Bb|FwJhvbR!EL3Lc6F1&1va{4}uEsW8X2p`Dw@Il#nRJSKo? 
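For orientation: the removed op.create_table(...) block above follows Alembic's autogenerated revision layout, where every table created in upgrade() has a matching drop in downgrade(). The sketch below shows that layout in miniature; the revision identifiers are invented for illustration, the column list is abridged from the dmp_orgs definition above, and this file is not part of the patch itself.

    """baseline migration (illustrative sketch only, not a file from this patch)

    Revision ID: abc123def456
    Revises: None
    """
    import sqlalchemy as sa
    from alembic import op
    from sqlalchemy.dialects import postgresql

    # Hypothetical identifiers, for illustration only.
    revision = 'abc123def456'
    down_revision = None
    branch_labels = None
    depends_on = None


    def upgrade():
        # Forward step: create the table (columns abridged from dmp_orgs above).
        op.create_table('dmp_orgs',
        sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1), autoincrement=True, nullable=False),
        sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='dmp_orgs_pkey')
        )


    def downgrade():
        # Reverse step: drop the table so the revision is fully reversible.
        op.drop_table('dmp_orgs')

Running `alembic upgrade head` executes upgrade(); `alembic downgrade -1` executes downgrade(), which is why the autogenerated downgrade shown in this patch recreates (or drops) every table its counterpart touches.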
zmMsBE0@lH>OcN|65~4@+UQS#=WXz#hAGho`EonsAdjPPmD~OdtBMw?%Ll)Su885(Q zq@KR78)IcJw@1btZ2=#%j62>mZc6lv6--f&qG%;k6y_Z9i+=5#n=oSrw68L-0RKtA zGBJMKV1ca;=s|6dKmZ4NQzEA%wFb~QU)P3U>%@B1@}>|}OJxIKy`&Ks8KGQctCnm&J=JF76`M!3s&2YJ2VS_-BHB!^L-HaIUbw@q@pwcVG zJ>c$6(>j3dvcPs*V7FRe(6$G=W72}AHNo~+`tG&B_E}*2EwE;rONs+%2P+O~{h&B7 z+b*7k&u!v%aff&|OPo0V9K-R9I7nsmI&?X!bo+3a>&$Jf7e_D$BoUe;DF_z{3nXyV zzydzs30PbsL)MDg9&wkFmzHkGYOVEmH_bCBGNAVuKFb9kNDnp};Zst?wNCoZr170|O}He5ZA~hpUDV=|;yz2?3l^Bo zn^e#avW-~Ryk(B6WxSdN7OZ0_v0(iD{b)I?IOE1R<28*!9IqRW4~j)o9v2s>d}T!O z{gUB(T(_6I#y`+Bdr~Y}U=0iGTM-Lmk}=!FO5YW zFKYpRc?Gt()QJW z#n+JKuA@KDw6>Bo{&%2UO{8JrJ%7W zO+jP&rJ&WQ)+y+#09Briy(n{6)&0a(>a5YJ>i3gYF2YlUV*}Opr>s2qX=~e`v9^V0 zvW)w)fQD8fHQUcwM~K<7YWwrnw!dJ7{-PE7AfO??qsFZP8j6t=^oj+V6650aq}8u& zKi&4DZOVRriM9F-XdmBU{c?0ZZudqD%oe4+3GG_h2KRc4=@odhz~%$kZ<==64tlk6I7@Mf~gK6jT3RT^q8Ve?$M~6SjYEiSgC^Ol(EvIp5cW zIeC+e_zxwuKJjkh8>zil{HG~r#QRNoKzxA8SH_eyKgeT~H^BOf?;i1?SafAO4`|+} zC&h=G@Z+5sot^NwmWZiE-JrM|+DJS6j{sNF$lw&g@e@py zT}Oks@sko`#_m%}#yY0_^i}HmQ5WA6Jp>q5As*{9v9Uhe!rOdKj>CN(<~{*#j{pj* z=DN>E>ITKT%5KkLUCSKDn+L)IVxev7S`8QxPqF(+gO5BuQ+V~=jqWXt$N&*Uv4@XNRH$XeA z&`#8jpf-h1S9GTeNZQOActxdkp*f94p+|R=6M$rh`YS3%!F5kqkD5*sLs6c>yB9bp z--6zK5lu2@d8p~fJZkm9)d0_`_At1{&LehbD#tclYw1ACAsO5VRfUbnSs`Ja)He64rn7H-ME#!w(+h=xe ztm}#!&+Ob_d)n3&PyT=I+{kmSidv1t0`SQeaJoN+*eilHc7W)yn!Ez{?-Q@X^^Njc z?+NcCJIonR5qKRu`V>DgvYv0PL0{XM-$V;IFtxS^#a6gFy8wOGdm}C4A;dbMYc0_V zo~ZL9rga_WX?6Y`#1i3iV}0j`B22xy%XlSdA6}VdTx9Bi$NCo03{F+7cWQA4V})NW z`q?fugZUJV4Z>^HO_8;V^kW0@2t4UyZP`dyfNp|URd*9`g?pRe8Df5`gkIJH{_+;^ zSG0iBdS%n3V8u6a0G)|j+d>XjIL^?la5_bct-s=b*6A*JgU!G)ddDlcQ)5}R(ZjNr z1aynk5$z?w6B>PM6I5EyHqwPFD@^=$3$@qr7s}U&hB30zNz4~W&azKKOX7KGT~?UU zPB$hUwB6N&FHu4Ft7GR7fZAtM)*9x6h@JT<4E8q7C3&-td86)rV4nTg#~Tss;RDXx zkGVoV93#rP4)9#+ITFnEEHf9aQ}WpqM7og85sV<$E;B0H-_{Imy8ma{X=)FI-f_=M z5!`JYyWP&;cK}0*Lpe4@-Ln}=ci4mIl<%=$={DMQ$WQ?ZqkL^`It)yj_X9_)zEX1T zXpD1r@|^ZPZeURs+YuHk3&}9pDSTt+FqTtv#`HVtGzNNM@neAAWzLA+Cnd}p7d>6T zxeZ$0P4Wb1aBhREW9I6;xT0qrWL*YV&ofs8xH?W(SH_L*-NW3abtPs&XnVqFSs>Ln zSYT>T9q}P?&AL9Tk~+&zPh3eGTu&KXmun}_H`s=~K(m*RL_tIcs6ValkxN|8FyDe5 zC7#AmD?qE$ew*zCr}KuRm?;+F^2naST(^{XoHNqA89iS>SNQu~h*8*|a9sC7gB$bd z!5*}@w>gH5c@Q~%o<;-*4qXns?2F8J%{p?v3D2a}8<0nr{#er!;W=vKo;4}1{=ARG z_r)xiKu5ltakZRag9k%RxTeHA059+u9Y)W-$I$!D=~)r;VFYk9tbNk~7MQ!FpTs~} zz<*~^J9>q7?~Xi=L1`w`X4clmJUHOe(4c#$cX^Tk3Z(3v#vSHL-uUCx5 zo!=Lcn|w|w@Ri?haBa1Q#XWHRKHZ!J|7M++PlZM$DLMNR!%K=(Udp@-@T1-Ib?mal z)paP)4LUiB>&rECD57sT=ZRYouzz_oRIClbuJ(+z#Gbm;+kc< z$M(%>5&4W)19!P%8y^QKt@lwcN9)L|aTf%4+)(;~(5Z^K?*=$Ml@)l9A2gJb+<%BE z?J>UNhYh8e3FT|-hnjY9QcKOLQ%En24q(2`Xj7&tF~P=V;l^e=Lk+ zL_5B-);dZ3Yht?i+A#IYkdVzXo4svPY-+*%&@*Yw{U0~;$8Eu~@3%dZ4oS!Eitvcv zyw%YC6KOSC*TPot6Uqw6W?skk@{|1iQxU7d6Y8j!CkH>RdPq6^49g){ZGP79;ag)V zflQ3e>xd$v zi2D$W&)0DnIHP4PDg8x5Df#gS%@xH9SLh0VUyo=>-CM`iFB$4vt;BK7T+T|2Q{?zM zCG1bBhtJppK3y>V1DNF$QB=?_cHSuF0gJ8^NrC-A6FxNyEPTh|dY@D`n1ZX#XBARD zzicEzcKAjmkvBmiSs9T7`YT2v-xABhh;KHOhH?B=EA-b4RBINk$iHr&W(4sT0}HqO z4J*{%))IqauHTBBC7StA^V=9Z7Ciy_J67mh0gZaYB(KKt{YUKiTku{=>UWiF-UivI z|17{Dz94?$ZHqy7<7%26qws{QeGQhE)K4b160O8Q8HC7^!-D7-1Y zdxZR-E^{7_+CP;L>3^?!l-)}#OBRcrt1s-}Im0Wca`6jJ_@RgcdsReyjFL))G!Wb0?fESVbWE$*}AEzXV9*8^05t6tSnnFxuzMx@Y~5Jd!n9eWBI=RiN&R z(5GmGq)!s%o%i55`FEo=KPit<`vl#2f7Iw3ooha3uISGD<4XI!0_~e@=cHhpzS`c_ zcEWugGO_rHvN-z?F54=d_?8#9K zS#}Bw7n~(z^Xs7GUG8%U<&4V7Qz#2Rx#(8>^jv+uQlQePszQpS=_Sq)Mcw>?2pHWm zAOtC(M4?#6*y(zC-f7gTj-QdKp$P7GOxMJc--&!tz-!YoQUo!o?lDGDCoIBT-2t5$ zRU}kRP9sX5#8`IU2X-+|jm~QA(x$1Yq~DK9Wcophs+r*{k(PF>_sUfSq7Ipk8SmC8hE?h^8=O(jDEbSb9zsad2obX>^SlZn$DzXO?} 
z9E@FCL^83Zd`Ahn)R0H=fd$7cQ?f<(#Xupy&X!H7AQm|<6jI2M=<-{y(J0_l0nOdJ zF>qsT)p?)}t=hCv=52xm3Z8TS#sdeCGOa`z;->tLO1XN$PwC{0AZDTBJU}_$Qr?Af zg%UQ_ozh&jhAeIbs~78wQ002L4i!v^8n{%st_;E0sxa6m6RjE}}LYgP1 z?&NzEUCf~YmC{Tb0W|W4Aa`YWE!j)NEb!6FEfAvJ>)f1WwxF!WbB%& z)h_sb-dt_5A}CQ;c?JeNQ>=JSepKt3>f}jlJwjMyTnGFvQ%Xd4EdlzaC>D^mklYGi zQxd@9LYc3JgAN+jAnl>tecK3|ygj3y&izG+`KYbra1UwBk)wAWJa_yol8TYixGy6# z`hxbPVngOq^Q5unjLOlM({;H;vgI5l7o+^CyD;Da4ft~UqP$y;A((ps$12w}^y!K~XJf2M^rLJkVF}GolBHL5I`y#s<{_Lpv{v6 zH5$4^zFx42?H^rDD+xy1lv2^5C_^c)=?Ij}fe$AZLjyRapFbJ}BHF24gocj?-Dk=b z^dcuhs^1$ZIcDplB?)GS#1vLbDXZIwK^=80c!;+a>vJ`i*K|sA=y#BP`aKdX z$?R1ZZVh<&T^^JPD$&@~GMi&;J#K7SPD`4X#djNFH=x579jOe4lA0EXXbutUUOt*% z5h+wzg`P!&P;OIBMmj_pFn6fbl$7Naixz=)HiLFGgLXHAZfyqbX$I|W2JLGG?T>&) zC8}_BD*h_4oEW?l>!by|%MRXc2j6N3@3DjT+QIwm;QdYDekFp!43f=Kzz0vu;`177 zubO~Uu4%h0`mEDrhLVlytg~|c-)!$h8HOW$$Z7Alx8=;Lk;YP!6dA0i2S~*RHGPz9 zca{m{p(Fl z;5gRAh5STQGoF#^8aFo8tP{0%KW^>io)R z2UWpQ^NMJdyy6;_c3IEDf%14PP*!tz?mm)!H|^KhKpW|e2qf54_2MhNRF7JUDQIhq zQX@EW7wbDBri(>)iPKi=HleLxic|B7M$g6_=rdhLI%>oxdJj3h)W~c+aZpX!=rehI zIl{idTq@TBuR%$FWq6$*Yp!NmbIov#m~_pJO?MYB0^ywNP?l|vA}aLZeI67M@l8(U zHJ)mhA>dvTF12=MY6_UtAO@H=SH^*Men@S@w`e9w?S(;g5P z-7F0l;$#?(=-lK%*4&}sVGqf*gPbs;!5R`@Bh9XNfzwxGv&V^$={gN7r$qnWL${&> zLb2`zFooYc;CE9GT4jp)Nt%QPvkHPyh}81hMI-vXa&qYq{PyknjiF7E%I=IX!X)12 zV2jmwJOtGZdEDJJF7(m#X^bmhid@XVo;D-$%>#HXfdRlG8@E_pwKZ9{^xHd?5rxy8 zN3&IjahGm;+qL_E>1Gd;UtW7!|K<3$#MLpJh)s=cMfh4yTArJ!x$JA_DFUnGv#AwsjF88I7ql{hNTYq{-7I`WH9TH zk;gGr3lNRUK1Ig2+X&Gi89>O?^%B077?z%o@xN$d;A|3a}evrY1+Wj=cL4!<*5qF@o4CXK2kyv2w0@ zEg04rUXe7!8h)?~)h&X^gSAtGV?hUNr>@iS#%AJMXL%mSFZmwDB&KqTD%2rDRO#09 zVoWcdA$n;o42M-YIYWN4+Q`VdR_`v5tl2~y-D)v)tEc5Y`5nt{PU_6|cE@pK^e22n zOM#cubFkCnvz&Xd(UWiL$eogJ>fqxYcOywKa4JTO#2(w8KR*&{!i?QOjP0O3Wqw4i za{65nk{LGebi{2vkD@(bxCqplqSnKOE^|J7=^7j<+euM|q1pCT@#+)g` zpmI1r#6k%Enhnz zm)Z?V!NPQN=wsfD!A7o6`CdwF#S2!fz1ZkBwx|qiFzFE;I9f*uW1@V#yN;&KN}GT? z2pwoOZaR1Rc&=V!R)I6Lz~{h2cZvANig>Afq3oyWn+|6htMqw~K3CnQ3YaWU+b@n0 zG>l^+I@Tnpiv3mwm#Vcfj%^2X{9Xk%4xHDMKZ6eBHl9)=Y=oi&XPxdEA`A^2>y9&`nO5*s+SmrE8)THilF?wf~~~B$LthW+2J5iD)ff zX?6`q>sTpP5YoKQWG~O$<2{vj`m}i(r$$>sl*NnzXFzd-*t6W$S`1R-^0zLxU&JsP zyU*f9RCH)k%$d+Hz7$5@^F%L6a>qz0LxC{WZNK~gLP115^r|Si8jptn;hO`E^{wX( z-bm%rdsJ}mj1T2WUh@34KLmjP!(Hy60T#GHj=Nyje5gzfhFrXcjtyO}MgF)~U!;`@ zw3#DoR_HNH1&xu#@s(PR5G>;*R`ufif_bY`@e z0-5H5j6cL#}cX}V%n!_tfZ8)poFnBb(bjY6a_1e$LzOE+#@mE}vWy7}P( zmPRMvrt(LCsXTMx1Amshb!iO3d4B1iw6#7^)YxFoE8tDldjmm9{8l1+UtP_dChw~O zVUur6-VJ0teX2nIe!-=a9J+b$xxF;$I^x?_@^m?9-=(>7X-*w!UZSlk-w5^9fu-cA zk0&Y$(j{ScKNNoBp)`v%5s-EJIu>aX>|a+!;IR#CHc|;^&8B>Z4qOpR)2kNESuBxz z`0$}qxwGW;HVUwGD~N4xy);E9l~7aTl&`0j(J-M3Li z*P)Xqj~~U0A_vd$yY)idyD$fA?6lv>cIapC1TAp;u}RLIJ9Y$bV!h|!@#BQl;E|K( z4jn&QICc8y;bUixojl=pJogNJpQXE;1Bb~lgGqu{rpeL?MwVX1%C9q{hn>7PbhWjm z5}Vtz`0Z`)cq)~mlKfrKzCyzvPxZ<+RFlJJ44+A^B}gyz{X%M(AQJ|v$I*YsxLu+> z^>Aub!)P=bZx|zDjK@-aG!Ko3c2B0dWV=au{X{AqV4+8lZPEJ6sewrzOV? 
zi8&rgWrG?jh3I@V)lXOQSJp@#$Zy$CjuB8VX<&@OCkf}7p3DIM#M9YC;=i)V#MiSO ziKnuii6^tE#1q-B#MiRv#8koe-nVB!ne zp~UC2!-+?-BZ-Hz*~I6vqlwRE#}c2(jwc?_p;|*;R>8WG53J&)$&u zSax;dquCo1AL-4dKb%^tY1Oh}%z&qcgBtLe_^Rm+{^~Z-zY~xakMhaXB*~6`Cqi;x z-M$%Dk`Mjdl3RZ^HLiI!iP~X33UJBcn(8Kj1C@-XKpvYJLUT}!Y)AF|q#<8Q4-)2M z{JR?8L`8pl$Kz?HkFdkllWC?;x0A92ygl_;Z{N35LSwnahsV|=J~XyA@xlIei4XLz zPrpCCVRc9nL@!eciijGzp5S^a5v@$em(v4bscHQsZUnMhjk4qv`aG0gxtaMTA@ZB* zQTJ#XT&I30Kb2;WFvfPeLe_sZouR%Hsy?g(+WG?>Sj!X;o{Z~f(ko~d8l5a3Gkh)G zO{0;G;tH~oT0(6Ub(oKEXFf1K@%{?$uctfZNE7YrF*adKV-Dh%XbLz%CE`&4!OUMw z4Uw+p7(3zvVC|`|qm*(%EH=h;{+LrJS`z%XwBazgM^GUy?86#GZiK zlTHBwu1P!z6S9~=hNA|&SdOiLL!eFbQ`4jABo0eSc6>fPqAkGAsnwF-SohMSx+VEb zyRDFYPl7tBWh|-o%qQGC@F+P>H+$Offcez0--Q?c)=GtfpF&KJ$lpz%!cF1}&m2~; z3n-V=OH}ce_F@Gg`}~r7Gseh|1b}U#jJ7y0{p)Y#>C}U&|G19l;t*eU_hfr?w=u{!qHrUmhNYh%7lxsyM*?;uE0oh{i1^@s6 From bc63a9a594a93ab7c7e76e5db3d149fb842eb156 Mon Sep 17 00:00:00 2001 From: jaanbaaz Date: Sun, 22 Dec 2024 21:50:41 +0530 Subject: [PATCH 19/32] Model and migration changes --- db/dmp_cron.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/db/dmp_cron.py b/db/dmp_cron.py index 6cdfcb4..ded0b37 100644 --- a/db/dmp_cron.py +++ b/db/dmp_cron.py @@ -40,12 +40,12 @@ async def get_all_dmp_issues(async_session): try: async with async_session() as session: # Alias for the DmpOrg table to use in the JSON_BUILD_OBJECT - dmp_org_alias = aliased(DmpOrg) + dmp_org_alias = aliased(DmpOrgs) # Build the query query = ( select( - DmpIssue, + DmpIssues, func.json_build_object( 'created_at', dmp_org_alias.created_at, 'description', dmp_org_alias.description, @@ -55,9 +55,9 @@ async def get_all_dmp_issues(async_session): 'repo_owner', dmp_org_alias.repo_owner ).label('dmp_orgs') ) - .outerjoin(dmp_org_alias, DmpIssue.org_id == dmp_org_alias.id) - .filter(DmpIssue.org_id.isnot(None)) - .order_by(DmpIssue.id) + .outerjoin(dmp_org_alias, DmpIssues.org_id == dmp_org_alias.id) + .filter(DmpIssues.org_id.isnot(None)) + .order_by(DmpIssues.id) ) # Execute the query and fetch results @@ -87,8 +87,8 @@ async def update_dmp_issue(async_session,issue_id: int, update_data: dict): async with session.begin(): # Build the update query query = ( - update(DmpIssue) - .where(DmpIssue.id == issue_id) + update(DmpIssues) + .where(DmpIssues.id == issue_id) .values(**update_data) ) @@ -108,7 +108,7 @@ async def upsert_data_orm(async_session, update_data): async with session.begin(): # Define the insert statement - stmt = insert(DmpIssueUpdate).values(**update_data) + stmt = insert(DmpIssueUpdates).values(**update_data) # Define the update statement in case of conflict stmt = stmt.on_conflict_do_update( @@ -145,7 +145,7 @@ async def upsert_pr_update(async_session, pr_update_data): pr_update_data['closed_at'] = datetime.fromisoformat(pr_update_data['closed_at']).replace(tzinfo=None) if pr_update_data['closed_at'] else None # Prepare the insert statement - stmt = insert(Prupdates).values(**pr_update_data) + stmt = insert(DmpPrUpdates).values(**pr_update_data) # Prepare the conflict resolution strategy stmt = stmt.on_conflict_do_update( @@ -179,10 +179,10 @@ async def update_dmp_week_update(async_session, update_data): async with session.begin(): # Define the filter conditions stmt = ( - select(DmpWeekUpdate) + select(DmpWeekUpdates) .where( - DmpWeekUpdate.week == update_data['week'], - DmpWeekUpdate.dmp_id == update_data['dmp_id'] + DmpWeekUpdates.week == update_data['week'], + DmpWeekUpdates.dmp_id == update_data['dmp_id'] ) ) @@ -208,9 +208,9 @@ 
async def get_week_updates(async_session, dmp_id, week): try: async with async_session() as session: # Build the ORM query - stmt = select(DmpWeekUpdate).where( - DmpWeekUpdate.dmp_id == dmp_id, - DmpWeekUpdate.week == week + stmt = select(DmpWeekUpdates).where( + DmpWeekUpdates.dmp_id == dmp_id, + DmpWeekUpdates.week == week ) # Execute the query result = await session.execute(stmt) @@ -231,7 +231,7 @@ async def insert_dmp_week_update(async_session, update_data): async with async_session() as session: async with session.begin(): # Define the insert statement - stmt = insert(DmpWeekUpdate).values(**update_data) + stmt = insert(DmpWeekUpdates).values(**update_data) # Execute the statement await session.execute(stmt) From 4cbeec231cfa916d112810f59f6573f47eda0807 Mon Sep 17 00:00:00 2001 From: jaanbaaz Date: Mon, 23 Dec 2024 15:27:39 +0530 Subject: [PATCH 20/32] dmp api changes and model changes --- db/dmp_api.py | 124 ++++++++++++++++++++++++++++++++++---------------- db/models.py | 5 +- 2 files changed, 88 insertions(+), 41 deletions(-) diff --git a/db/dmp_api.py b/db/dmp_api.py index 49c5e50..1093eb1 100644 --- a/db/dmp_api.py +++ b/db/dmp_api.py @@ -4,58 +4,104 @@ from dotenv import load_dotenv # from flask_sqlalchemy import SQLAlchemy import sqlalchemy +from sqlalchemy.future import select -# load_dotenv() -db = sqlalchemy() - class DmpAPIQueries: - def get_issue_query(): - results = ( - db.session.query( - DmpOrgs.id.label('org_id'), - DmpOrgs.name.label('org_name'), - func.json_agg( - func.json_build_object( - 'id', DmpIssues.id, - 'name', DmpIssues.title + async def get_issue_query(async_session): + try: + async with async_session() as session: + results = await session.execute( + select( + DmpOrgs.id.label('org_id'), + DmpOrgs.name.label('org_name'), + func.json_agg( + func.json_build_object( + 'id', DmpIssues.id, + 'name', DmpIssues.title + ) + ).label('issues') ) - ).label('issues') - ) - .outerjoin(DmpIssues, DmpOrgs.id == DmpIssues.org_id) - .group_by(DmpOrgs.id) - .order_by(DmpOrgs.id) - .all() - ) + .outerjoin(DmpIssues, DmpOrgs.id == DmpIssues.org_id) + .group_by(DmpOrgs.id) + .order_by(DmpOrgs.id) + ) + + # Extract results as a list of dictionaries if needed + data = results.all() + + return data + except Exception as e: + print(f"An error occurred: get_column_value {e}") + return None - return results - def get_issue_owner(name): - response = DmpOrgs.query.filter_by(name=name).all() - return response + async def get_issue_owner(async_session, name): + try: + async with async_session() as session: + response = await session.execute( + select(DmpOrgs).filter_by(name=name) + ) + results = response.scalars().all() + return results + except Exception as e: + print(f"An error occurred: get_column_value {e}") + return None - def get_actual_owner_query(owner): - results = DmpIssues.query.filter(DmpIssues.repo_owner.like(f'%{owner}%')).all() - results = [val.to_dict() for val in results] - return results + async def get_actual_owner_query(async_session, owner): + try: + async with async_session() as session: + response = await session.execute( + select(DmpIssues).filter(DmpIssues.repo_owner.like(f'%{owner}%')) + ) + results = response.scalars().all() # Fetch all matching rows as objects + results = [val.to_dict() for val in results] # Convert objects to dicts + return results + except Exception as e: + print(f"An error occurred: get_column_value {e}") + return None - def get_dmp_issues(issue_id): - results = DmpIssues.query.filter_by(id=issue_id).all() - results = [val.to_dict() for 
val in results] - return results + async def get_dmp_issues(async_session, issue_id): + try: + async with async_session() as session: + response = await session.execute( + select(DmpIssues).filter_by(id=issue_id) + ) + results = response.scalars().all() # Fetch all matching rows as objects + results = [val.to_dict() for val in results] # Convert objects to dicts + return results + except Exception as e: + print(f"An error occurred: get_column_value {e}") + return None - def get_dmp_issue_updates(dmp_issue_id): - results = DmpIssueUpdates.query.filter_by(dmp_id=dmp_issue_id).all() - results = [val.to_dict() for val in results] - return results + async def get_dmp_issue_updates(async_session, dmp_issue_id): + try: + async with async_session() as session: + response = await session.execute( + select(DmpIssueUpdates).filter_by(dmp_id=dmp_issue_id) + ) + results = response.scalars().all() # Fetch all matching rows as objects + results = [val.to_dict() for val in results] # Convert objects to dicts + return results + except Exception as e: + print(f"An error occurred: get_column_value {e}") + return None - def get_pr_data(dmp_issue_id): - pr_updates = DmpPrUpdates.query.filter_by(dmp_id=dmp_issue_id).all() - pr_updates_dict = [pr_update.to_dict() for pr_update in pr_updates] - return pr_updates_dict + async def get_pr_data(async_session, dmp_issue_id): + try: + async with async_session() as session: + response = await session.execute( + select(DmpPrUpdates).filter_by(dmp_id=dmp_issue_id) + ) + pr_updates = response.scalars().all() # Fetch all matching rows as objects + pr_updates_dict = [pr_update.to_dict() for pr_update in pr_updates] # Convert objects to dicts + return pr_updates_dict + except Exception as e: + print(f"An error occurred: get_column_value {e}") + return None \ No newline at end of file diff --git a/db/models.py b/db/models.py index 2a82f37..bd05abe 100644 --- a/db/models.py +++ b/db/models.py @@ -378,6 +378,7 @@ class DmpIssues(Base): org_id = Column(BigInteger, ForeignKey('dmp_orgs.id'), nullable=False) description = Column(Text, nullable=False) repo = Column(Text, nullable=False) + repo_owner = Column(Text, nullable=False) def __repr__(self): return f"" @@ -392,7 +393,8 @@ def to_dict(self): 'title': self.title, 'org_id': self.org_id, 'description': self.description, - 'repo': self.repo + 'repo': self.repo, + 'repo_owner': self.repo_owner } class DmpOrgs(Base): @@ -404,7 +406,6 @@ class DmpOrgs(Base): description = Column(Text, nullable=False) link = Column(Text, nullable=False) repo_owner = Column(Text, nullable=False) - version = Column(Text, nullable=True) # issues = relationship('Issues', backref='organization', lazy='joined') From 2d77a75b6be0b889d57e2ba3beb4babef4a8db7b Mon Sep 17 00:00:00 2001 From: jaanbaaz Date: Mon, 23 Dec 2024 16:53:20 +0530 Subject: [PATCH 21/32] dmp api type casting --- db/dmp_api.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/db/dmp_api.py b/db/dmp_api.py index 1093eb1..081f714 100644 --- a/db/dmp_api.py +++ b/db/dmp_api.py @@ -67,7 +67,7 @@ async def get_dmp_issues(async_session, issue_id): try: async with async_session() as session: response = await session.execute( - select(DmpIssues).filter_by(id=issue_id) + select(DmpIssues).filter_by(id=int(issue_id)) ) results = response.scalars().all() # Fetch all matching rows as objects results = [val.to_dict() for val in results] # Convert objects to dicts @@ -81,7 +81,7 @@ async def get_dmp_issue_updates(async_session, dmp_issue_id): try: async with async_session() as 
session: response = await session.execute( - select(DmpIssueUpdates).filter_by(dmp_id=dmp_issue_id) + select(DmpIssueUpdates).filter_by(dmp_id=int(dmp_issue_id)) ) results = response.scalars().all() # Fetch all matching rows as objects results = [val.to_dict() for val in results] # Convert objects to dicts @@ -95,7 +95,7 @@ async def get_pr_data(async_session, dmp_issue_id): try: async with async_session() as session: response = await session.execute( - select(DmpPrUpdates).filter_by(dmp_id=dmp_issue_id) + select(DmpPrUpdates).filter_by(dmp_id=int(dmp_issue_id)) ) pr_updates = response.scalars().all() # Fetch all matching rows as objects pr_updates_dict = [pr_update.to_dict() for pr_update in pr_updates] # Convert objects to dicts From a7380436ef5c65eae86011d67260544ee8af87fc Mon Sep 17 00:00:00 2001 From: jaanbaaz Date: Tue, 24 Dec 2024 11:38:27 +0530 Subject: [PATCH 22/32] Relative paths for models --- db/discord-bot.py | 2 +- db/dmp_api.py | 2 +- db/dmp_cron.py | 6 +++--- db/server.py | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/db/discord-bot.py b/db/discord-bot.py index a9e0de6..e62dd8b 100644 --- a/db/discord-bot.py +++ b/db/discord-bot.py @@ -4,7 +4,7 @@ from dotenv import load_dotenv from sqlalchemy import create_engine,select,desc,update,delete from sqlalchemy.orm import sessionmaker -from models import * +from .models import * from sqlalchemy.ext.declarative import DeclarativeMeta from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession diff --git a/db/dmp_api.py b/db/dmp_api.py index 081f714..84f5a82 100644 --- a/db/dmp_api.py +++ b/db/dmp_api.py @@ -1,4 +1,4 @@ -from models import * +from .models import * from sqlalchemy import func import os from dotenv import load_dotenv diff --git a/db/dmp_cron.py b/db/dmp_cron.py index ded0b37..8dd0135 100644 --- a/db/dmp_cron.py +++ b/db/dmp_cron.py @@ -1,5 +1,5 @@ from sqlalchemy.future import select -from models import * +from .models import * from sqlalchemy import update # from app import async_session from sqlalchemy.dialects.postgresql import insert @@ -70,9 +70,9 @@ async def get_all_dmp_issues(async_session): issue_dict = row._asdict() # Convert row to dict dmp_orgs = issue_dict.pop('dmp_orgs') # Extract JSON object from row issue_dict['dmp_orgs'] = dmp_orgs - issue_dict.update(issue_dict['DmpIssue'].to_dict()) + issue_dict.update(issue_dict['DmpIssues'].to_dict()) # Add JSON object back to dict - del issue_dict['DmpIssue'] + del issue_dict['DmpIssues'] data.append(issue_dict) return data diff --git a/db/server.py b/db/server.py index f098c7f..c366f5a 100644 --- a/db/server.py +++ b/db/server.py @@ -6,7 +6,7 @@ from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession from sqlalchemy.pool import NullPool from sqlalchemy.ext.declarative import DeclarativeMeta -from models import Base, ContributorsRegistration,GithubClassroomData, IssueContributors +from .models import Base, ContributorsRegistration,GithubClassroomData, IssueContributors from sqlalchemy import delete, insert from sqlalchemy import select, asc, desc,update, join from sqlalchemy.exc import IntegrityError @@ -14,7 +14,7 @@ from datetime import datetime from sqlalchemy import cast, String ,and_ from sqlalchemy.dialects.postgresql import ARRAY -from models import Issues, CommunityOrgs, PointSystem, PrHistory +from .models import Issues, CommunityOrgs, PointSystem, PrHistory # dotenv.load_dotenv(".env") From c51f330b78c732b57addb2e4c4aa73b2362e72c8 Mon Sep 17 00:00:00 2001 From: jaanbaaz Date: Tue, 24 Dec 2024 11:59:14 +0530 
Subject: [PATCH 23/32] Shared models integrated --- .gitmodules | 3 + app.py | 20 ++-- models.py | 282 +++++++++++++++++++++++----------------------- shared_migrations | 1 + test.py | 12 +- utils.py | 8 +- 6 files changed, 166 insertions(+), 160 deletions(-) create mode 100644 .gitmodules create mode 160000 shared_migrations diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..f899050 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "shared_migrations"] + path = shared_migrations + url = git@github.com:Code4GovTech/shared-models-migrations.git diff --git a/app.py b/app.py index 4b9706a..4a5c8ec 100644 --- a/app.py +++ b/app.py @@ -4,12 +4,14 @@ from apscheduler.schedulers.asyncio import AsyncIOScheduler from dotenv import load_dotenv from datetime import datetime,timezone -from query import PostgresORM +# from query import PostgresORM +from shared_migrations.db import PostgresORM, get_postgres_uri +from shared_migrations.db.dmp_cron import DmpCronQueries from utils import handle_week_data, parse_issue_description from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession from sqlalchemy.orm import sessionmaker from datetime import datetime -from models import * +from shared_migrations.db.models import * from sqlalchemy.pool import NullPool @@ -23,7 +25,7 @@ app = Quart(__name__) # Initialize Quart app -app.config['SQLALCHEMY_DATABASE_URI'] = PostgresORM.get_postgres_uri() +app.config['SQLALCHEMY_DATABASE_URI'] = get_postgres_uri() # Initialize Async SQLAlchemy engine = create_async_engine(app.config['SQLALCHEMY_DATABASE_URI'], echo=False,poolclass=NullPool) @@ -106,7 +108,7 @@ async def dmp_updates(): TARGET_DATE = os.getenv('TARGET_DATE') # Loop through all dmp issues - dmp_tickets = await PostgresORM.get_all_dmp_issues(async_session) + dmp_tickets = await DmpCronQueries.get_all_dmp_issues(async_session) for dmp in dmp_tickets: dmp_id = dmp['id'] @@ -141,7 +143,7 @@ async def dmp_updates(): app.logger.info('Decription from remote: ', issue_update) - update_data = await PostgresORM.update_dmp_issue(async_session,issue_id=dmp_id, update_data=issue_update) + update_data = await DmpCronQueries.update_dmp_issue(async_session,issue_id=dmp_id, update_data=issue_update) print(f"dmp_issue update works - dmp_id {dmp_id}") if update_data else print(f"dmp_issue update failed - dmp_id {dmp_id}") app.logger.info(update_data) @@ -181,12 +183,12 @@ async def dmp_updates(): app.logger.info('Comment from remote: ', comment_update) #get created_at - created_timestamp = await PostgresORM.get_timestamp(async_session, DmpIssueUpdate, 'created_at', 'comment_id', comment_update['comment_id']) + created_timestamp = await DmpCronQueries.get_timestamp(async_session, DmpIssueUpdates, 'created_at', 'comment_id', comment_update['comment_id']) comment_update['created_at'] = datetime.utcnow() if not created_timestamp else created_timestamp comment_update['comment_updated_at'] = datetime.utcnow().replace(tzinfo=None) comment_update['created_at'] = comment_update['created_at'].replace(tzinfo=None) - upsert_comments = await PostgresORM.upsert_data_orm(async_session,comment_update) + upsert_comments = await DmpCronQueries.upsert_data_orm(async_session,comment_update) print(f"dmp_issue_updates works dmp_id - {dmp_id}") if upsert_comments else print(f"comment failed dmp_id - {dmp_id}") app.logger.info(upsert_comments) @@ -212,11 +214,11 @@ async def dmp_updates(): if (pr_created_at >= TARGET_DATE): pr_data = define_pr_update(pr_val, dmp_id) - created_timestamp = await 
PostgresORM.get_timestamp(async_session,Prupdates,'created_at','pr_id',pr_data['pr_id']) + created_timestamp = await DmpCronQueries.get_timestamp(async_session, DmpPrUpdates,'created_at','pr_id',pr_data['pr_id']) pr_data['created_at'] = datetime.utcnow() if not created_timestamp else created_timestamp pr_data['created_at'] = pr_data['created_at'].replace(tzinfo=None) - upsert_pr = await PostgresORM.upsert_pr_update(async_session,pr_data) + upsert_pr = await DmpCronQueries.upsert_pr_update(async_session,pr_data) print(f"dmp_pr_updates works - dmp_id is {dmp_id}") if upsert_pr else print(f"dmp_pr_updates failed - dmp_id is {dmp_id}") app.logger.info(upsert_pr) diff --git a/models.py b/models.py index 951571b..757bc1c 100644 --- a/models.py +++ b/models.py @@ -1,142 +1,142 @@ -from datetime import datetime -from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, func,BigInteger -from sqlalchemy.orm import relationship -from sqlalchemy.ext.declarative import declarative_base - - -Base = declarative_base() - - - -# Define your models -class DmpIssue(Base): - __tablename__ = 'dmp_issues' - - id = Column(Integer, primary_key=True, autoincrement=True) - issue_url = Column(String, nullable=False) - issue_number = Column(Integer, nullable=False) - mentor_username = Column(String, nullable=True) - contributor_username = Column(String, nullable=True) - title = Column(String, nullable=False) - org_id = Column(Integer, ForeignKey('dmp_orgs.id'), nullable=False) - description = Column(Text, nullable=True) - repo = Column(String, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'issue_url': self.issue_url, - 'issue_number': self.issue_number, - 'mentor_username': self.mentor_username, - 'contributor_username': self.contributor_username, - 'title': self.title, - 'org_id': self.org_id, - 'description': self.description, - 'repo': self.repo - } - -class DmpOrg(Base): - __tablename__ = 'dmp_orgs' - - id = Column(Integer, primary_key=True, autoincrement=True) - created_at = Column(DateTime, default=datetime.utcnow, nullable=False) - name = Column(String, nullable=False) - description = Column(Text, nullable=True) - link = Column(String, nullable=False) - repo_owner = Column(String, nullable=False) - dmp_issues = relationship('DmpIssue', backref='organization', lazy=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'created_at': self.created_at.isoformat(), - 'name': self.name, - 'description': self.description, - 'link': self.link, - 'repo_owner': self.repo_owner - } - - -class DmpIssueUpdate(Base): - __tablename__ = 'dmp_issue_updates' - - created_at = Column(DateTime, default=datetime.utcnow, nullable=False) - body_text = Column(Text, nullable=False) - comment_link = Column(String, nullable=False) - comment_id = Column(BigInteger, primary_key=True, nullable=False) - comment_api = Column(String, nullable=False) - comment_updated_at = Column(DateTime, nullable=False) - dmp_id = Column(Integer, ForeignKey('dmp_orgs.id'), nullable=False) - created_by = Column(String, nullable=False) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'created_at': self.created_at.isoformat(), - 'body_text': self.body_text, - 'comment_link': self.comment_link, - 'comment_id': self.comment_id, - 'comment_api': self.comment_api, - 'comment_updated_at': self.comment_updated_at.isoformat(), - 'dmp_id': self.dmp_id, - 'created_by': self.created_by - } - -class Prupdates(Base): - __tablename__ = 
'dmp_pr_updates' - - created_at = Column(DateTime, nullable=False, default=datetime.utcnow) - pr_id = Column(Integer, nullable=False, primary_key=True) - status = Column(String, nullable=False) - title = Column(String, nullable=False) - pr_updated_at = Column(DateTime, nullable=False, default=datetime.utcnow) - merged_at = Column(DateTime) - closed_at = Column(DateTime) - dmp_id = Column(Integer, ForeignKey('dmp_issues.id'), nullable=False) - link = Column(String, nullable=False) - - def __repr__(self): - return f'' - - def to_dict(self): - return { - 'created_at': self.created_at.isoformat(), - 'pr_id': self.pr_id, - 'status': self.status, - 'title': self.title, - 'pr_updated_at': self.pr_updated_at.isoformat(), - 'merged_at': self.merged_at.isoformat() if self.merged_at else None, - 'closed_at': self.closed_at.isoformat() if self.closed_at else None, - 'dmp_id': self.dmp_id, - 'link': self.link - } - -class DmpWeekUpdate(Base): - __tablename__ = 'dmp_week_updates' - - id = Column(Integer, primary_key=True, autoincrement=True) - issue_url = Column(String, nullable=False) - week = Column(Integer, nullable=False) - total_task = Column(Integer, nullable=False) - completed_task = Column(Integer, nullable=False) - progress = Column(Integer, nullable=False) - task_data = Column(Text, nullable=False) - dmp_id = Column(Integer, nullable=False) - - def __repr__(self): - return f"" +# from datetime import datetime +# from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, func,BigInteger +# from sqlalchemy.orm import relationship +# from sqlalchemy.ext.declarative import declarative_base + + +# Base = declarative_base() + + + +# # Define your models +# class DmpIssue(Base): +# __tablename__ = 'dmp_issues' + +# id = Column(Integer, primary_key=True, autoincrement=True) +# issue_url = Column(String, nullable=False) +# issue_number = Column(Integer, nullable=False) +# mentor_username = Column(String, nullable=True) +# contributor_username = Column(String, nullable=True) +# title = Column(String, nullable=False) +# org_id = Column(Integer, ForeignKey('dmp_orgs.id'), nullable=False) +# description = Column(Text, nullable=True) +# repo = Column(String, nullable=True) + +# def __repr__(self): +# return f"" + +# def to_dict(self): +# return { +# 'id': self.id, +# 'issue_url': self.issue_url, +# 'issue_number': self.issue_number, +# 'mentor_username': self.mentor_username, +# 'contributor_username': self.contributor_username, +# 'title': self.title, +# 'org_id': self.org_id, +# 'description': self.description, +# 'repo': self.repo +# } + +# class DmpOrg(Base): +# __tablename__ = 'dmp_orgs' + +# id = Column(Integer, primary_key=True, autoincrement=True) +# created_at = Column(DateTime, default=datetime.utcnow, nullable=False) +# name = Column(String, nullable=False) +# description = Column(Text, nullable=True) +# link = Column(String, nullable=False) +# repo_owner = Column(String, nullable=False) +# dmp_issues = relationship('DmpIssue', backref='organization', lazy=True) + +# def __repr__(self): +# return f"" + +# def to_dict(self): +# return { +# 'id': self.id, +# 'created_at': self.created_at.isoformat(), +# 'name': self.name, +# 'description': self.description, +# 'link': self.link, +# 'repo_owner': self.repo_owner +# } + + +# class DmpIssueUpdate(Base): +# __tablename__ = 'dmp_issue_updates' + +# created_at = Column(DateTime, default=datetime.utcnow, nullable=False) +# body_text = Column(Text, nullable=False) +# comment_link = Column(String, nullable=False) +# comment_id = 
Column(BigInteger, primary_key=True, nullable=False) +# comment_api = Column(String, nullable=False) +# comment_updated_at = Column(DateTime, nullable=False) +# dmp_id = Column(Integer, ForeignKey('dmp_orgs.id'), nullable=False) +# created_by = Column(String, nullable=False) + +# def __repr__(self): +# return f"" + +# def to_dict(self): +# return { +# 'created_at': self.created_at.isoformat(), +# 'body_text': self.body_text, +# 'comment_link': self.comment_link, +# 'comment_id': self.comment_id, +# 'comment_api': self.comment_api, +# 'comment_updated_at': self.comment_updated_at.isoformat(), +# 'dmp_id': self.dmp_id, +# 'created_by': self.created_by +# } + +# class Prupdates(Base): +# __tablename__ = 'dmp_pr_updates' + +# created_at = Column(DateTime, nullable=False, default=datetime.utcnow) +# pr_id = Column(Integer, nullable=False, primary_key=True) +# status = Column(String, nullable=False) +# title = Column(String, nullable=False) +# pr_updated_at = Column(DateTime, nullable=False, default=datetime.utcnow) +# merged_at = Column(DateTime) +# closed_at = Column(DateTime) +# dmp_id = Column(Integer, ForeignKey('dmp_issues.id'), nullable=False) +# link = Column(String, nullable=False) + +# def __repr__(self): +# return f'' + +# def to_dict(self): +# return { +# 'created_at': self.created_at.isoformat(), +# 'pr_id': self.pr_id, +# 'status': self.status, +# 'title': self.title, +# 'pr_updated_at': self.pr_updated_at.isoformat(), +# 'merged_at': self.merged_at.isoformat() if self.merged_at else None, +# 'closed_at': self.closed_at.isoformat() if self.closed_at else None, +# 'dmp_id': self.dmp_id, +# 'link': self.link +# } + +# class DmpWeekUpdate(Base): +# __tablename__ = 'dmp_week_updates' + +# id = Column(Integer, primary_key=True, autoincrement=True) +# issue_url = Column(String, nullable=False) +# week = Column(Integer, nullable=False) +# total_task = Column(Integer, nullable=False) +# completed_task = Column(Integer, nullable=False) +# progress = Column(Integer, nullable=False) +# task_data = Column(Text, nullable=False) +# dmp_id = Column(Integer, nullable=False) + +# def __repr__(self): +# return f"" - def to_dict(self): - return { - 'id': self.id, - 'week': self.week, - 'dmp_id': self.dmp_id, - } +# def to_dict(self): +# return { +# 'id': self.id, +# 'week': self.week, +# 'dmp_id': self.dmp_id, +# } diff --git a/shared_migrations b/shared_migrations new file mode 160000 index 0000000..8bef9ed --- /dev/null +++ b/shared_migrations @@ -0,0 +1 @@ +Subproject commit 8bef9ed7e5e524dfbb7a0d1386a53561eb6f4ebe diff --git a/test.py b/test.py index 72a1cc9..e48af9f 100644 --- a/test.py +++ b/test.py @@ -4,7 +4,7 @@ from query import PostgresORM from sqlalchemy.orm import aliased from sqlalchemy.future import select -from models import * +from shared_migrations.db.models import * # Suppress asyncio debug messages logging.getLogger('asyncio').setLevel(logging.CRITICAL) @@ -38,12 +38,12 @@ async def asyncSetUp(self): # CHANGE BELOW DB CALL WHEN CHANGES MADE IN PostgresORM.get_all_dmp_issues() async with async_session() as session: # Alias for the DmpOrg table to use in the JSON_BUILD_OBJECT - dmp_org_alias = aliased(DmpOrg) + dmp_org_alias = aliased(DmpOrgs) # Build the query query = ( select( - DmpIssue, + DmpIssues, func.json_build_object( 'created_at', dmp_org_alias.created_at, 'description', dmp_org_alias.description, @@ -53,9 +53,9 @@ async def asyncSetUp(self): 'repo_owner', dmp_org_alias.repo_owner ).label('dmp_orgs') ) - .outerjoin(dmp_org_alias, DmpIssue.org_id == dmp_org_alias.id) - 
.filter(DmpIssue.org_id.isnot(None)) - .order_by(DmpIssue.id) + .outerjoin(dmp_org_alias, DmpIssues.org_id == dmp_org_alias.id) + .filter(DmpIssues.org_id.isnot(None)) + .order_by(DmpIssues.id) ) # Execute the query and fetch results diff --git a/utils.py b/utils.py index a269f8d..d77860e 100644 --- a/utils.py +++ b/utils.py @@ -2,7 +2,7 @@ import requests import logging import markdown2 -from query import PostgresORM +from shared_migrations.db.dmp_cron import DmpCronQueries def parse_issue_description(issue_body): # Description is everything before goals. @@ -89,13 +89,13 @@ async def handle_week_data(comment, issue_url, dmp_id, mentor_name,async_session } - exist = await PostgresORM.get_week_updates(async_session,week_json['dmp_id'],week_json['week']) + exist = await DmpCronQueries.get_week_updates(async_session,week_json['dmp_id'],week_json['week']) if not exist: - add_data = await PostgresORM.insert_dmp_week_update(async_session,week_json) + add_data = await DmpCronQueries.insert_dmp_week_update(async_session,week_json) print(f"Week data added {week_json['dmp_id']}-{week_json['week']}") if add_data else None else: - update_data = await PostgresORM.update_dmp_week_update(async_session,week_json) + update_data = await DmpCronQueries.update_dmp_week_update(async_session,week_json) print(f"Week data updated {week_json['dmp_id']}-{week_json['week']}") if update_data else None week_json = {} From 741896b0d730edb7784813146444348eda8c9aac Mon Sep 17 00:00:00 2001 From: jaanbaaz Date: Tue, 24 Dec 2024 13:59:03 +0530 Subject: [PATCH 24/32] discord bot renamed --- db/discord_bot.py | 302 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 302 insertions(+) create mode 100644 db/discord_bot.py diff --git a/db/discord_bot.py b/db/discord_bot.py new file mode 100644 index 0000000..e62dd8b --- /dev/null +++ b/db/discord_bot.py @@ -0,0 +1,302 @@ +import os + +# from discord import Member +from dotenv import load_dotenv +from sqlalchemy import create_engine,select,desc,update,delete +from sqlalchemy.orm import sessionmaker +from .models import * +from sqlalchemy.ext.declarative import DeclarativeMeta +from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession + +# load_dotenv() + +class DiscordBotQueries: + # def __init__(self): + # DB_HOST = os.getenv('POSTGRES_DB_HOST') + # DB_NAME = os.getenv('POSTGRES_DB_NAME') + # DB_USER = os.getenv('POSTGRES_DB_USER') + # DB_PASS = os.getenv('POSTGRES_DB_PASS') + + # engine = create_async_engine(f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}') + # async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession) + # self.session = async_session + + # def get_instance(): + # return PostgresClient() + + def convert_dict(self,data): + try: + if type(data) == list: + data = [val.to_dict() for val in data] + else: + return [data.to_dict()] + + return data + except Exception as e: + print(e) + raise Exception + + def getStatsStorage(self, fileName): + return self.client.storage.from_("c4gt-github-profile").download(fileName) + + + def logVCAction(self,user, action): + try: + new_log = VcLogs(discord_id=user.id, discord_name=user.name, option=action) + self.session.add(new_log) + self.session.commit() + return self.convert_dict(new_log) + except Exception as e: + self.session.rollback() + print("Error logging VC action:", e) + return None + + def getLeaderboard(self, id: int): + data = self.session.query(Leaderboard).where(Leaderboard.discord_id == id).all() + return self.convert_dict(data) + + + def 
read(self, table_class, query_key, query_value, columns=None): + try: + stmt = select(table_class) + stmt = stmt.where(getattr(table_class, query_key) == query_value) + + if columns: + stmt = stmt.with_only_columns(*(getattr(table_class, col) for col in columns)) + result = self.session.execute(stmt) + rows = result.fetchall() + column_names = [col.name for col in stmt.columns] + data = [dict(zip(column_names, row)) for row in rows] + return data + + result = self.session.execute(stmt) + return self.convert_dict(result.scalars().all()) + + except Exception as e: + print(f"Error reading data from table '{table_class}':", e) + return None + + def get_class_by_tablename(self,tablename): + try: + for cls in Base.registry._class_registry.values(): + if isinstance(cls, DeclarativeMeta): + if hasattr(cls, '__tablename__') and cls.__tablename__ == tablename: + return cls + return None + except Exception as e: + print(f"ERROR get_class_by_tablename - {e}") + return None + + def read_by_order_limit(self, table_class, query_key, query_value, order_column, order_by=False, limit=1, columns="*"): + try: + stmt = select(table_class) + stmt = stmt.where(getattr(table_class, query_key) == query_value) + if order_by: + stmt = stmt.order_by(desc(getattr(table_class, order_column))) + else: + stmt = stmt.order_by(getattr(table_class, order_column)) + + stmt = stmt.limit(limit) + if columns != "*": + stmt = stmt.with_only_columns(*(getattr(table_class, col) for col in columns)) + + result = self.session.execute(stmt) + results = result.fetchall() + + # Convert results to list of dictionaries + column_names = [col['name'] for col in result.keys()] + data = [dict(zip(column_names, row)) for row in results] + + return data + + except Exception as e: + print("Error reading data:", e) + return None + + async def read_all(self,table_class): + try: + table = self.get_class_by_tablename(table_class) + # Query all records from the specified table class + async with self.session() as session: + stmt = select(table) + result = await session.execute(stmt) + + data = result.scalars().all() + result = self.convert_dict(data) + return result + except Exception as e: + print(f"An error occurred -read_all_from_table : {e}") + return None + + def update(self, table_class, update_data, query_key, query_value): + try: + stmt = ( + update(table_class) + .where(getattr(table_class, query_key) == query_value) + .values(update_data) + .returning(*[getattr(table_class, col) for col in update_data.keys()]) # Return updated columns + ) + + result = self.session.execute(stmt) + self.session.commit() + updated_record = result.fetchone() + + if updated_record: + updated_record_dict = dict(zip(result.keys(), updated_record)) + return updated_record_dict + else: + return None + except Exception as e: + import pdb;pdb.set_trace() + print("Error updating record:", e) + return None + + + def insert(self, table, data): + try: + new_record = table(**data) + self.session.add(new_record) + self.session.commit() + return new_record.to_dict() + except Exception as e: + print("Error inserting data:", e) + self.session.rollback() # Rollback in case of error + return None + + + def memberIsAuthenticated(self, member: Member): + data = self.session.query(ContributorsRegistration).where(ContributorsRegistration.discord_id == member.id).all() + if data: + return True + else: + return False + + def addChapter(self, roleId: int, orgName: str, type: str): + try: + existing_record = self.session.query(Chapters).filter_by(discord_role_id=roleId).first() + + if 
existing_record: + existing_record.type = type + existing_record.org_name = orgName + else: + new_record = Chapters(discord_role_id=roleId, type=type, org_name=orgName) + self.session.add(new_record) + + self.session.commit() + return existing_record.to_dict() if existing_record else new_record.to_dict() + except Exception as e: + print("Error adding or updating chapter:", e) + return None + + + def deleteChapter(self,roleId: int): + try: + # Build the delete statement + stmt = delete(Chapters).where(Chapters.discord_role_id == roleId) + result = self.session.execute(stmt) + self.session.commit() + return True if result.rowcount else False + except Exception as e: + print("Error deleting chapter:", e) + return None + + async def updateContributor(self, contributor: Member, table_class=None): + try: + async with self.session() as session: + if table_class == None: + table_class = ContributorsDiscord + chapters = lookForRoles(contributor.roles)["chapter_roles"] + gender = lookForRoles(contributor.roles)["gender"] + + # Prepare the data to be upserted + update_data = { + "discord_id": contributor.id, + "discord_username": contributor.name, + "chapter": chapters[0] if chapters else None, + "gender": gender, + "joined_at": contributor.joined_at, + } + + stmt = select(ContributorsDiscord).where(ContributorsDiscord.discord_id == contributor.id) + result = await session.execute(stmt) + existing_record = result.scalars().first() + + # existing_record = self.session.query(table_class).filter_by(discord_id=contributor.id).first() + + if existing_record: + stmt = ( + update(table_class) + .where(table_class.discord_id == contributor.id) + .values(update_data) + ) + self.session.execute(stmt) + else: + new_record = table_class(**update_data) + self.session.add(new_record) + + # Commit the transaction + self.session.commit() + return True + except Exception as e: + print("Error updating contributor:", e) + return False + + + def updateContributors(self, contributors: [Member], table_class): + try: + for contributor in contributors: + chapters = lookForRoles(contributor.roles)["chapter_roles"] + gender = lookForRoles(contributor.roles)["gender"] + update_data = { + "discord_id": contributor.id, + "discord_username": contributor.name, + "chapter": chapters[0] if chapters else None, + "gender": gender, + "joined_at": contributor.joined_at, + } + existing_record = self.session.query(table_class).filter_by(discord_id=contributor.id).first() + + if existing_record: + stmt = ( + update(table_class) + .where(table_class.discord_id == contributor.id) + .values(update_data) + ) + self.session.execute(stmt) + else: + new_record = table_class(**update_data) + self.session.add(new_record) + + self.session.commit() + return True + except Exception as e: + print("Error updating contributors:", e) + return False + + + def deleteContributorDiscord(self, contributorDiscordIds, table_class=None): + try: + if table_class == None: + table_class = ContributorsDiscord + stmt = delete(table_class).where(table_class.discord_id.in_(contributorDiscordIds)) + self.session.execute(stmt) + self.session.commit() + + return True + except Exception as e: + print("Error deleting contributors:", e) + self.session.rollback() + return False + + + + def read_all_active(self, table): + if table == "contributors_discord": + table = ContributorsDiscord + data = self.session.query(table).where(table.is_active == True).all() + return self.convert_dict(data) + + def invalidateContributorDiscord(self, contributorDiscordIds): + table = 
"contributors_discord" + for id in contributorDiscordIds: + self.client.table(table).update({'is_active': 'false'}).eq('discord_id', id).execute() From b43331bba6811abb55422d592c6aa46aa05c7ca1 Mon Sep 17 00:00:00 2001 From: Shreyash Date: Fri, 27 Dec 2024 13:22:09 +0530 Subject: [PATCH 25/32] Cleaned Submodule --- .gitmodules | 3 --- shared_migrations | 1 - 2 files changed, 4 deletions(-) delete mode 100644 .gitmodules delete mode 160000 shared_migrations diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index f899050..0000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "shared_migrations"] - path = shared_migrations - url = git@github.com:Code4GovTech/shared-models-migrations.git diff --git a/shared_migrations b/shared_migrations deleted file mode 160000 index 8bef9ed..0000000 --- a/shared_migrations +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 8bef9ed7e5e524dfbb7a0d1386a53561eb6f4ebe From 053997466ece070b880c08f34f33627c66c9f38a Mon Sep 17 00:00:00 2001 From: Shreyash Date: Fri, 27 Dec 2024 13:42:03 +0530 Subject: [PATCH 26/32] Trying git push --- app.py | 93 ++++++++++++++++++++++++++++++++-------------------------- 1 file changed, 52 insertions(+), 41 deletions(-) diff --git a/app.py b/app.py index 4a5c8ec..c08092d 100644 --- a/app.py +++ b/app.py @@ -1,9 +1,9 @@ # app.py from quart import Quart -import os,markdown2,httpx +import os, markdown2, httpx from apscheduler.schedulers.asyncio import AsyncIOScheduler from dotenv import load_dotenv -from datetime import datetime,timezone +from datetime import datetime, timezone # from query import PostgresORM from shared_migrations.db import PostgresORM, get_postgres_uri from shared_migrations.db.dmp_cron import DmpCronQueries @@ -15,12 +15,11 @@ from sqlalchemy.pool import NullPool - +# Random comment # Load environment variables from .env file load_dotenv() delay_mins: str = os.getenv("SCHEDULER_DELAY_IN_MINS") - app = Quart(__name__) @@ -28,7 +27,7 @@ app.config['SQLALCHEMY_DATABASE_URI'] = get_postgres_uri() # Initialize Async SQLAlchemy -engine = create_async_engine(app.config['SQLALCHEMY_DATABASE_URI'], echo=False,poolclass=NullPool) +engine = create_async_engine(app.config['SQLALCHEMY_DATABASE_URI'], echo=False, poolclass=NullPool) async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession) scheduler = AsyncIOScheduler() @@ -97,8 +96,6 @@ def define_pr_update(pr_val, dmp_id): return {} - - @app.route('/dmp_updates') async def dmp_updates(): print( @@ -109,15 +106,15 @@ async def dmp_updates(): # Loop through all dmp issues dmp_tickets = await DmpCronQueries.get_all_dmp_issues(async_session) - + for dmp in dmp_tickets: - dmp_id = dmp['id'] + dmp_id = dmp['id'] print('processing dmp ids ', dmp_id) issue_number = dmp['issue_number'] repo = dmp['repo'] owner = dmp['repo_owner'] - app.logger.info("DMP_ID: "+str(dmp_id)) + app.logger.info("DMP_ID: " + str(dmp_id)) # # Make the HTTP request to GitHub API headers = { @@ -137,20 +134,22 @@ async def dmp_updates(): # Parse issue discription print('processing description ') issue_update = define_issue_description_update(issue_response.json()) - - issue_update['mentor_username'] = dmp['mentor_username'] #get from db - issue_update['contributor_username'] = dmp['contributor_username'] #get from db - + + issue_update['mentor_username'] = dmp['mentor_username'] # get from db + issue_update['contributor_username'] = dmp['contributor_username'] # get from db + app.logger.info('Decription from remote: ', issue_update) - - update_data = 
await DmpCronQueries.update_dmp_issue(async_session,issue_id=dmp_id, update_data=issue_update) - print(f"dmp_issue update works - dmp_id {dmp_id}") if update_data else print(f"dmp_issue update failed - dmp_id {dmp_id}") + update_data = await DmpCronQueries.update_dmp_issue(async_session, issue_id=dmp_id, + update_data=issue_update) + + print(f"dmp_issue update works - dmp_id {dmp_id}") if update_data else print( + f"dmp_issue update failed - dmp_id {dmp_id}") app.logger.info(update_data) else: print('issue response ', issue_response) app.logger.error("Description API failed: " + - str(issue_response.status_code) + " for dmp_id: "+str(dmp_id)) + str(issue_response.status_code) + " for dmp_id: " + str(dmp_id)) # 2. Read & Update comments of the ticket page = 1 @@ -167,35 +166,43 @@ async def dmp_updates(): week_learning_status = False # Loop through comments comments_array = comments_response.json() - if comments_array == [] or len(comments_array)==0: + if comments_array == [] or len(comments_array) == 0: break for val in comments_response.json(): # Handle if any of the comments are week data plain_text_body = markdown2.markdown(val['body']) if "Weekly Goals" in plain_text_body and not week_update_status: - week_update_status = await handle_week_data(val, dmp['issue_url'], dmp_id, issue_update['mentor_username'],async_session) - + week_update_status = await handle_week_data(val, dmp['issue_url'], dmp_id, + issue_update['mentor_username'], + async_session) + if "Weekly Learnings" in plain_text_body and not week_learning_status: - week_learning_status = await handle_week_data(val, dmp['issue_url'], dmp_id, issue_update['mentor_username'],async_session) - + week_learning_status = await handle_week_data(val, dmp['issue_url'], dmp_id, + issue_update['mentor_username'], + async_session) + # Parse comments comment_update = define_issue_update(val, dmp_id=dmp_id) app.logger.info('Comment from remote: ', comment_update) - - #get created_at - created_timestamp = await DmpCronQueries.get_timestamp(async_session, DmpIssueUpdates, 'created_at', 'comment_id', comment_update['comment_id']) - comment_update['created_at'] = datetime.utcnow() if not created_timestamp else created_timestamp + + # get created_at + created_timestamp = await DmpCronQueries.get_timestamp(async_session, DmpIssueUpdates, + 'created_at', 'comment_id', + comment_update['comment_id']) + comment_update[ + 'created_at'] = datetime.utcnow() if not created_timestamp else created_timestamp comment_update['comment_updated_at'] = datetime.utcnow().replace(tzinfo=None) - comment_update['created_at'] = comment_update['created_at'].replace(tzinfo=None) - - upsert_comments = await DmpCronQueries.upsert_data_orm(async_session,comment_update) - - print(f"dmp_issue_updates works dmp_id - {dmp_id}") if upsert_comments else print(f"comment failed dmp_id - {dmp_id}") + comment_update['created_at'] = comment_update['created_at'].replace(tzinfo=None) + + upsert_comments = await DmpCronQueries.upsert_data_orm(async_session, comment_update) + + print(f"dmp_issue_updates works dmp_id - {dmp_id}") if upsert_comments else print( + f"comment failed dmp_id - {dmp_id}") app.logger.info(upsert_comments) else: print('issue response ', issue_response) app.logger.error("Comments API failed: " + - str(issue_response.status_code) + " for dmp_id: "+str(dmp_id)) + str(issue_response.status_code) + " for dmp_id: " + str(dmp_id)) break page = page + 1 @@ -213,19 +220,22 @@ async def dmp_updates(): pr_created_at = pr_val['created_at'] if (pr_created_at >= TARGET_DATE): 
pr_data = define_pr_update(pr_val, dmp_id) - - created_timestamp = await DmpCronQueries.get_timestamp(async_session, DmpPrUpdates,'created_at','pr_id',pr_data['pr_id']) + + created_timestamp = await DmpCronQueries.get_timestamp(async_session, DmpPrUpdates, + 'created_at', 'pr_id', + pr_data['pr_id']) pr_data['created_at'] = datetime.utcnow() if not created_timestamp else created_timestamp pr_data['created_at'] = pr_data['created_at'].replace(tzinfo=None) - - upsert_pr = await DmpCronQueries.upsert_pr_update(async_session,pr_data) - - print(f"dmp_pr_updates works - dmp_id is {dmp_id}") if upsert_pr else print(f"dmp_pr_updates failed - dmp_id is {dmp_id}") + + upsert_pr = await DmpCronQueries.upsert_pr_update(async_session, pr_data) + + print(f"dmp_pr_updates works - dmp_id is {dmp_id}") if upsert_pr else print( + f"dmp_pr_updates failed - dmp_id is {dmp_id}") app.logger.info(upsert_pr) else: print('issue response ', issue_response) app.logger.error("PR API failed: " + - str(issue_response.status_code) + " for dmp_id: "+str(dmp_id)) + str(issue_response.status_code) + " for dmp_id: " + str(dmp_id)) print(f"last run at - {datetime.utcnow()}") return "success" except Exception as e: @@ -237,9 +247,10 @@ async def dmp_updates(): @app.before_serving async def start_scheduler(): app.logger.info( - "Scheduling dmp_updates_job to run every "+delay_mins+" mins") + "Scheduling dmp_updates_job to run every " + delay_mins + " mins") scheduler.add_job(dmp_updates, 'interval', minutes=int(delay_mins)) scheduler.start() + if __name__ == '__main__': app.run(host='0.0.0.0') From 510823f2d45420ef75f6bdbedfaa046ec4ef16e8 Mon Sep 17 00:00:00 2001 From: Shreyash Date: Fri, 27 Dec 2024 15:51:59 +0530 Subject: [PATCH 27/32] Removed Comments --- app.py | 7 +- query.py | 3 - shared_migrations/db/discord-bot.py | 302 ---------------------------- shared_migrations/db/discord_bot.py | 22 -- shared_migrations/db/dmp_api.py | 4 - shared_migrations/db/dmp_cron.py | 11 - shared_migrations/db/server.py | 31 +-- shared_migrations/migrations/env.py | 16 -- utils.py | 3 - 9 files changed, 3 insertions(+), 396 deletions(-) delete mode 100644 shared_migrations/db/discord-bot.py diff --git a/app.py b/app.py index c08092d..cc0a835 100644 --- a/app.py +++ b/app.py @@ -14,9 +14,6 @@ from shared_migrations.db.models import * from sqlalchemy.pool import NullPool - -# Random comment - # Load environment variables from .env file load_dotenv() delay_mins: str = os.getenv("SCHEDULER_DELAY_IN_MINS") @@ -169,7 +166,7 @@ async def dmp_updates(): if comments_array == [] or len(comments_array) == 0: break for val in comments_response.json(): - # Handle if any of the comments are week data + # Handle if any of the comments are week data plain_text_body = markdown2.markdown(val['body']) if "Weekly Goals" in plain_text_body and not week_update_status: week_update_status = await handle_week_data(val, dmp['issue_url'], dmp_id, @@ -253,4 +250,4 @@ async def start_scheduler(): if __name__ == '__main__': - app.run(host='0.0.0.0') + app.run(host='0.0.0.0') \ No newline at end of file diff --git a/query.py b/query.py index aeab7ea..c0d0b44 100644 --- a/query.py +++ b/query.py @@ -1,12 +1,9 @@ from sqlalchemy.future import select -from models import * from sqlalchemy import update -# from app import async_session from sqlalchemy.dialects.postgresql import insert from datetime import datetime from sqlalchemy.orm import aliased import os -from sqlalchemy.orm import Session from sqlalchemy.exc import NoResultFound diff --git 
a/shared_migrations/db/discord-bot.py b/shared_migrations/db/discord-bot.py deleted file mode 100644 index e62dd8b..0000000 --- a/shared_migrations/db/discord-bot.py +++ /dev/null @@ -1,302 +0,0 @@ -import os - -# from discord import Member -from dotenv import load_dotenv -from sqlalchemy import create_engine,select,desc,update,delete -from sqlalchemy.orm import sessionmaker -from .models import * -from sqlalchemy.ext.declarative import DeclarativeMeta -from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession - -# load_dotenv() - -class DiscordBotQueries: - # def __init__(self): - # DB_HOST = os.getenv('POSTGRES_DB_HOST') - # DB_NAME = os.getenv('POSTGRES_DB_NAME') - # DB_USER = os.getenv('POSTGRES_DB_USER') - # DB_PASS = os.getenv('POSTGRES_DB_PASS') - - # engine = create_async_engine(f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}') - # async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession) - # self.session = async_session - - # def get_instance(): - # return PostgresClient() - - def convert_dict(self,data): - try: - if type(data) == list: - data = [val.to_dict() for val in data] - else: - return [data.to_dict()] - - return data - except Exception as e: - print(e) - raise Exception - - def getStatsStorage(self, fileName): - return self.client.storage.from_("c4gt-github-profile").download(fileName) - - - def logVCAction(self,user, action): - try: - new_log = VcLogs(discord_id=user.id, discord_name=user.name, option=action) - self.session.add(new_log) - self.session.commit() - return self.convert_dict(new_log) - except Exception as e: - self.session.rollback() - print("Error logging VC action:", e) - return None - - def getLeaderboard(self, id: int): - data = self.session.query(Leaderboard).where(Leaderboard.discord_id == id).all() - return self.convert_dict(data) - - - def read(self, table_class, query_key, query_value, columns=None): - try: - stmt = select(table_class) - stmt = stmt.where(getattr(table_class, query_key) == query_value) - - if columns: - stmt = stmt.with_only_columns(*(getattr(table_class, col) for col in columns)) - result = self.session.execute(stmt) - rows = result.fetchall() - column_names = [col.name for col in stmt.columns] - data = [dict(zip(column_names, row)) for row in rows] - return data - - result = self.session.execute(stmt) - return self.convert_dict(result.scalars().all()) - - except Exception as e: - print(f"Error reading data from table '{table_class}':", e) - return None - - def get_class_by_tablename(self,tablename): - try: - for cls in Base.registry._class_registry.values(): - if isinstance(cls, DeclarativeMeta): - if hasattr(cls, '__tablename__') and cls.__tablename__ == tablename: - return cls - return None - except Exception as e: - print(f"ERROR get_class_by_tablename - {e}") - return None - - def read_by_order_limit(self, table_class, query_key, query_value, order_column, order_by=False, limit=1, columns="*"): - try: - stmt = select(table_class) - stmt = stmt.where(getattr(table_class, query_key) == query_value) - if order_by: - stmt = stmt.order_by(desc(getattr(table_class, order_column))) - else: - stmt = stmt.order_by(getattr(table_class, order_column)) - - stmt = stmt.limit(limit) - if columns != "*": - stmt = stmt.with_only_columns(*(getattr(table_class, col) for col in columns)) - - result = self.session.execute(stmt) - results = result.fetchall() - - # Convert results to list of dictionaries - column_names = [col['name'] for col in result.keys()] - data = 
[dict(zip(column_names, row)) for row in results] - - return data - - except Exception as e: - print("Error reading data:", e) - return None - - async def read_all(self,table_class): - try: - table = self.get_class_by_tablename(table_class) - # Query all records from the specified table class - async with self.session() as session: - stmt = select(table) - result = await session.execute(stmt) - - data = result.scalars().all() - result = self.convert_dict(data) - return result - except Exception as e: - print(f"An error occurred -read_all_from_table : {e}") - return None - - def update(self, table_class, update_data, query_key, query_value): - try: - stmt = ( - update(table_class) - .where(getattr(table_class, query_key) == query_value) - .values(update_data) - .returning(*[getattr(table_class, col) for col in update_data.keys()]) # Return updated columns - ) - - result = self.session.execute(stmt) - self.session.commit() - updated_record = result.fetchone() - - if updated_record: - updated_record_dict = dict(zip(result.keys(), updated_record)) - return updated_record_dict - else: - return None - except Exception as e: - import pdb;pdb.set_trace() - print("Error updating record:", e) - return None - - - def insert(self, table, data): - try: - new_record = table(**data) - self.session.add(new_record) - self.session.commit() - return new_record.to_dict() - except Exception as e: - print("Error inserting data:", e) - self.session.rollback() # Rollback in case of error - return None - - - def memberIsAuthenticated(self, member: Member): - data = self.session.query(ContributorsRegistration).where(ContributorsRegistration.discord_id == member.id).all() - if data: - return True - else: - return False - - def addChapter(self, roleId: int, orgName: str, type: str): - try: - existing_record = self.session.query(Chapters).filter_by(discord_role_id=roleId).first() - - if existing_record: - existing_record.type = type - existing_record.org_name = orgName - else: - new_record = Chapters(discord_role_id=roleId, type=type, org_name=orgName) - self.session.add(new_record) - - self.session.commit() - return existing_record.to_dict() if existing_record else new_record.to_dict() - except Exception as e: - print("Error adding or updating chapter:", e) - return None - - - def deleteChapter(self,roleId: int): - try: - # Build the delete statement - stmt = delete(Chapters).where(Chapters.discord_role_id == roleId) - result = self.session.execute(stmt) - self.session.commit() - return True if result.rowcount else False - except Exception as e: - print("Error deleting chapter:", e) - return None - - async def updateContributor(self, contributor: Member, table_class=None): - try: - async with self.session() as session: - if table_class == None: - table_class = ContributorsDiscord - chapters = lookForRoles(contributor.roles)["chapter_roles"] - gender = lookForRoles(contributor.roles)["gender"] - - # Prepare the data to be upserted - update_data = { - "discord_id": contributor.id, - "discord_username": contributor.name, - "chapter": chapters[0] if chapters else None, - "gender": gender, - "joined_at": contributor.joined_at, - } - - stmt = select(ContributorsDiscord).where(ContributorsDiscord.discord_id == contributor.id) - result = await session.execute(stmt) - existing_record = result.scalars().first() - - # existing_record = self.session.query(table_class).filter_by(discord_id=contributor.id).first() - - if existing_record: - stmt = ( - update(table_class) - .where(table_class.discord_id == contributor.id) - 
.values(update_data) - ) - self.session.execute(stmt) - else: - new_record = table_class(**update_data) - self.session.add(new_record) - - # Commit the transaction - self.session.commit() - return True - except Exception as e: - print("Error updating contributor:", e) - return False - - - def updateContributors(self, contributors: [Member], table_class): - try: - for contributor in contributors: - chapters = lookForRoles(contributor.roles)["chapter_roles"] - gender = lookForRoles(contributor.roles)["gender"] - update_data = { - "discord_id": contributor.id, - "discord_username": contributor.name, - "chapter": chapters[0] if chapters else None, - "gender": gender, - "joined_at": contributor.joined_at, - } - existing_record = self.session.query(table_class).filter_by(discord_id=contributor.id).first() - - if existing_record: - stmt = ( - update(table_class) - .where(table_class.discord_id == contributor.id) - .values(update_data) - ) - self.session.execute(stmt) - else: - new_record = table_class(**update_data) - self.session.add(new_record) - - self.session.commit() - return True - except Exception as e: - print("Error updating contributors:", e) - return False - - - def deleteContributorDiscord(self, contributorDiscordIds, table_class=None): - try: - if table_class == None: - table_class = ContributorsDiscord - stmt = delete(table_class).where(table_class.discord_id.in_(contributorDiscordIds)) - self.session.execute(stmt) - self.session.commit() - - return True - except Exception as e: - print("Error deleting contributors:", e) - self.session.rollback() - return False - - - - def read_all_active(self, table): - if table == "contributors_discord": - table = ContributorsDiscord - data = self.session.query(table).where(table.is_active == True).all() - return self.convert_dict(data) - - def invalidateContributorDiscord(self, contributorDiscordIds): - table = "contributors_discord" - for id in contributorDiscordIds: - self.client.table(table).update({'is_active': 'false'}).eq('discord_id', id).execute() diff --git a/shared_migrations/db/discord_bot.py b/shared_migrations/db/discord_bot.py index e62dd8b..d9f5d13 100644 --- a/shared_migrations/db/discord_bot.py +++ b/shared_migrations/db/discord_bot.py @@ -1,28 +1,8 @@ -import os - -# from discord import Member -from dotenv import load_dotenv from sqlalchemy import create_engine,select,desc,update,delete -from sqlalchemy.orm import sessionmaker from .models import * from sqlalchemy.ext.declarative import DeclarativeMeta -from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession - -# load_dotenv() class DiscordBotQueries: - # def __init__(self): - # DB_HOST = os.getenv('POSTGRES_DB_HOST') - # DB_NAME = os.getenv('POSTGRES_DB_NAME') - # DB_USER = os.getenv('POSTGRES_DB_USER') - # DB_PASS = os.getenv('POSTGRES_DB_PASS') - - # engine = create_async_engine(f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}') - # async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession) - # self.session = async_session - - # def get_instance(): - # return PostgresClient() def convert_dict(self,data): try: @@ -221,8 +201,6 @@ async def updateContributor(self, contributor: Member, table_class=None): result = await session.execute(stmt) existing_record = result.scalars().first() - # existing_record = self.session.query(table_class).filter_by(discord_id=contributor.id).first() - if existing_record: stmt = ( update(table_class) diff --git a/shared_migrations/db/dmp_api.py b/shared_migrations/db/dmp_api.py index 
84f5a82..7528a8b 100644 --- a/shared_migrations/db/dmp_api.py +++ b/shared_migrations/db/dmp_api.py @@ -1,9 +1,5 @@ from .models import * from sqlalchemy import func -import os -from dotenv import load_dotenv -# from flask_sqlalchemy import SQLAlchemy -import sqlalchemy from sqlalchemy.future import select diff --git a/shared_migrations/db/dmp_cron.py b/shared_migrations/db/dmp_cron.py index 8dd0135..26e6d0e 100644 --- a/shared_migrations/db/dmp_cron.py +++ b/shared_migrations/db/dmp_cron.py @@ -1,25 +1,14 @@ from sqlalchemy.future import select from .models import * from sqlalchemy import update -# from app import async_session from sqlalchemy.dialects.postgresql import insert from datetime import datetime from sqlalchemy.orm import aliased -import os -from sqlalchemy.orm import Session from sqlalchemy.exc import NoResultFound class DmpCronQueries: - # def get_postgres_uri(): - # DB_HOST = os.getenv('POSTGRES_DB_HOST') - # DB_NAME = os.getenv('POSTGRES_DB_NAME') - # DB_USER = os.getenv('POSTGRES_DB_USER') - # DB_PASS = os.getenv('POSTGRES_DB_PASS') - - # return f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}' - async def get_timestamp(async_session, model, col_name: str, col: str, value): try: # Construct the ORM query diff --git a/shared_migrations/db/server.py b/shared_migrations/db/server.py index c366f5a..e621c65 100644 --- a/shared_migrations/db/server.py +++ b/shared_migrations/db/server.py @@ -16,31 +16,8 @@ from sqlalchemy.dialects.postgresql import ARRAY from .models import Issues, CommunityOrgs, PointSystem, PrHistory -# dotenv.load_dotenv(".env") - -# def get_postgres_uri(): -# DB_HOST = os.getenv('POSTGRES_DB_HOST') -# DB_NAME = os.getenv('POSTGRES_DB_NAME') -# DB_USER = os.getenv('POSTGRES_DB_USER') -# DB_PASS = os.getenv('POSTGRES_DB_PASS') - -# # DB_URL = os.getenv('DATABASE_URL') -# # print('db') -# return f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}' - - class ServerQueries: - - # def __init__(self): - # DATABASE_URL = get_postgres_uri() - # # Initialize Async SQLAlchemy - # engine = create_async_engine(DATABASE_URL, echo=False,poolclass=NullPool) - # async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession) - # self.session = async_session - - # def get_instance(): - # return PostgresORM() def convert_dict(self,data): try: @@ -438,9 +415,6 @@ async def record_created_ticket(self, data,table_name): async with self.session() as session: # Dynamically get the ORM class for the table table = self.get_class_by_tablename(table_name) - - # Build and execute the query to check if the issue_id already exists - # stmt = select(table).where(table.issue_id == data['issue_id']) stmt = insert(table).values( link=data['link'], @@ -461,9 +435,7 @@ async def record_created_ticket(self, data,table_name): result = await session.execute(stmt) await session.commit() - - # inserted_record = await result.fetchone() - # print("inserted result ", inserted_record) + return result except Exception as e: @@ -625,7 +597,6 @@ async def addPr(self, prData, issue_id): for pr in prData: data = { - # "api_url":data["url"], "html_url":pr["html_url"], "pr_id":pr["pr_id"], "raised_by":pr["raised_by"], diff --git a/shared_migrations/migrations/env.py b/shared_migrations/migrations/env.py index 7961305..43002a6 100644 --- a/shared_migrations/migrations/env.py +++ b/shared_migrations/migrations/env.py @@ -24,9 +24,6 @@ # add your model's MetaData object here # for 'autogenerate' support -# from myapp import mymodel -# target_metadata = 
mymodel.Base.metadata
-# target_metadata = shared_metadata
 target_metadata = Base.metadata
 
 # other values from the config, defined by the needs of env.py,
@@ -66,19 +63,6 @@ def run_migrations_online() -> None:
     and associate a connection with the context.
 
     """
-    # connectable = engine_from_config(
-    #     config.get_section(config.config_ini_section, {}),
-    #     prefix="sqlalchemy.",
-    #     poolclass=pool.NullPool,
-    # )
-
-    # with connectable.connect() as connection:
-    #     context.configure(
-    #         connection=connection, target_metadata=target_metadata
-    #     )
-
-    # with context.begin_transaction():
-    #     context.run_migrations()
     engine = engine_from_config(
         config.get_section(config.config_ini_section), prefix='sqlalchemy.')

diff --git a/utils.py b/utils.py
index d77860e..3b15e7a 100644
--- a/utils.py
+++ b/utils.py
@@ -35,9 +35,6 @@ def parse_issue_description(issue_body):
 async def handle_week_data(comment, issue_url, dmp_id, mentor_name,async_session):
     try:
         # Get writer of comment and if it is not the selected mentor, return right away
-        # writter = "@"+comment['user']['login']
-        # if writter != mentor_name:
-        #     return False
 
         plain_text_body = markdown2.markdown(comment['body'])

From d982ba27b05d808f616f388b4a89bbe3872b8aca Mon Sep 17 00:00:00 2001
From: Srijan-SS02
Date: Fri, 27 Dec 2024 17:04:08 +0530
Subject: [PATCH 28/32] Updated Dockerfile and .dockerignore to build
 submodules

---
 .dockerignore |  1 +
 Dockerfile    | 11 ++++++++++-
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/.dockerignore b/.dockerignore
index 4c49bd7..6b6dd54 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1 +1,2 @@
 .env
+!.git
\ No newline at end of file

diff --git a/Dockerfile b/Dockerfile
index 4df6c07..95597f1 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -4,12 +4,21 @@ FROM python:3.12-slim
 # Set the working directory in the container
 WORKDIR /app
 
-# Copy the current directory contents into the container at /app
+# Install necessary tools
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends git openssh-client && \
+    rm -rf /var/lib/apt/lists/*
+
+# Copy the current directory contents, including the .git directory
 COPY . /app
 
+# Fetch git submodules via a BuildKit SSH mount (requires SSH agent forwarding at build time)
+RUN --mount=type=ssh git submodule update --init --recursive
+
 # Install any needed packages specified in requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt
 
+# Expose the application port
 EXPOSE 5000
 
 # Run app.py when the container launches
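Note on the RUN --mount=type=ssh step above: it only works when the image is built with BuildKit and the host's SSH agent is forwarded into the build. A minimal sketch of the corresponding build invocation (the image tag is illustrative):

    # assumes a local ssh-agent holds a key with read access to the submodule remotes
    DOCKER_BUILDKIT=1 docker build --ssh default -t dmp-cron .

Without --ssh default, the mount has no agent socket to expose, so the git submodule update step cannot authenticate against SSH-based submodule URLs. On recent Docker releases BuildKit is the default builder and the DOCKER_BUILDKIT=1 prefix can be dropped.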
From 463b3d2fb91cb867edecbceee5a2f3be5cbe2e76 Mon Sep 17 00:00:00 2001
From: Srijan Srivastava
Date: Fri, 27 Dec 2024 23:37:46 +0530
Subject: [PATCH 29/32] Updated requirements.txt for flask

---
 requirements.txt | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index d96e908..5053e82 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,10 +1,11 @@
 APScheduler==3.10.4
 httpx==0.27.0
 python-dotenv==1.0.1
-Quart==0.19.5
+Quart==0.20.0
 requests==2.32.3
 markdown2==2.4.13
 postgrest==0.16.4
 psycopg2-binary==2.9.9
 SQLAlchemy==2.0.32
-asyncpg==0.29.0
\ No newline at end of file
+asyncpg==0.29.0
+flask==3.0.1

From 3f372fc2d127f325f66a3a12e7d74386891636cf Mon Sep 17 00:00:00 2001
From: Shreyash
Date: Fri, 3 Jan 2025 14:08:53 +0530
Subject: [PATCH 30/32] Removed Submodule

---
 shared_migrations/.gitignore                |    1 -
 shared_migrations/.vscode/settings.json     |    5 -
 shared_migrations/README.md                 |  Bin 58 -> 0 bytes
 shared_migrations/alembic.ini               |  118 --
 shared_migrations/db/__init__.py            |   33 -
 shared_migrations/db/discord_bot.py         |  280 ---
 shared_migrations/db/dmp_api.py             |  103 -
 shared_migrations/db/dmp_cron.py            |  235 ---
 shared_migrations/db/models.py              | 1586 ---------------
 shared_migrations/db/server.py              |  909 ---------
 shared_migrations/migrations/README         |    1 -
 shared_migrations/migrations/env.py         |   83 -
 shared_migrations/migrations/script.py.mako |   26 -
 .../8d1e6a7e959a_initial_migration.py       | 1723 -----------------
 shared_migrations/requirements.txt          |  Bin 164 -> 0 bytes
 shared_migrations/sample.env                |    5 -
 16 files changed, 5108 deletions(-)
 delete mode 100644 shared_migrations/.gitignore
 delete mode 100644 shared_migrations/.vscode/settings.json
 delete mode 100644 shared_migrations/README.md
 delete mode 100644 shared_migrations/alembic.ini
 delete mode 100644 shared_migrations/db/__init__.py
 delete mode 100644 shared_migrations/db/discord_bot.py
 delete mode 100644 shared_migrations/db/dmp_api.py
 delete mode 100644 shared_migrations/db/dmp_cron.py
 delete mode 100644 shared_migrations/db/models.py
 delete mode 100644 shared_migrations/db/server.py
 delete mode 100644 shared_migrations/migrations/README
 delete mode 100644 shared_migrations/migrations/env.py
 delete mode 100644 shared_migrations/migrations/script.py.mako
 delete mode 100644 shared_migrations/migrations/versions/8d1e6a7e959a_initial_migration.py
 delete mode 100644 shared_migrations/requirements.txt
 delete mode 100644 shared_migrations/sample.env

diff --git a/shared_migrations/.gitignore b/shared_migrations/.gitignore
deleted file mode 100644
index 2eea525..0000000
--- a/shared_migrations/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-.env
\ No newline at end of file

diff --git a/shared_migrations/.vscode/settings.json b/shared_migrations/.vscode/settings.json
deleted file mode 100644
index acd8ec1..0000000
--- a/shared_migrations/.vscode/settings.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    "python.analysis.extraPaths": [
-        "./db"
-    ]
-}
\ No newline at end of file

diff --git a/shared_migrations/README.md b/shared_migrations/README.md
deleted file mode 100644
index 88b2a7e5e073d26c94e1ef212826f7ed54b4701f..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 58
zcmezWPnki1p_n0qA(5eoA(bJ8L6;$yAs diff --git a/shared_migrations/alembic.ini b/shared_migrations/alembic.ini deleted file mode 100644 index 8dae057..0000000 --- a/shared_migrations/alembic.ini +++ /dev/null @@ -1,118 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -# Use forward slashes (/) also on windows to provide an os agnostic path -script_location = migrations - -# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s -# Uncomment the line below if you want the files to be prepended with date and time -# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file -# for all available tokens -# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s - -# sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. -prepend_sys_path = . - -# timezone to use when rendering the date within the migration file -# as well as the filename. -# If specified, requires the python>=3.9 or backports.zoneinfo library. -# Any required deps can installed by adding `alembic[tz]` to the pip requirements -# string value is passed to ZoneInfo() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the "slug" field -# truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; This defaults -# to migrations/versions. When using multiple version -# directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions - -# version path separator; As mentioned above, this is the character used to split -# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. -# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. -# Valid values for version_path_separator are: -# -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -# version_path_separator = newline -version_path_separator = os # Use os.pathsep. Default configuration used for new projects. - -# set to 'true' to search source files recursively -# in each "version_locations" directory -# new in Alembic version 1.10 -# recursive_version_locations = false - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - -sqlalchemy.url = %(DATABASE_URL)s - - - -[post_write_hooks] -# post_write_hooks defines scripts or Python functions that are run -# on newly generated revision scripts. 
See the documentation for further -# detail and examples - -# format using "black" - use the console_scripts runner, against the "black" entrypoint -# hooks = black -# black.type = console_scripts -# black.entrypoint = black -# black.options = -l 79 REVISION_SCRIPT_FILENAME - -# lint with attempts to fix using "ruff" - use the exec runner, execute a binary -# hooks = ruff -# ruff.type = exec -# ruff.executable = %(here)s/.venv/bin/ruff -# ruff.options = --fix REVISION_SCRIPT_FILENAME - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARNING -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARNING -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/shared_migrations/db/__init__.py b/shared_migrations/db/__init__.py deleted file mode 100644 index 52d6df0..0000000 --- a/shared_migrations/db/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -import os - -# from discord import Member -from dotenv import load_dotenv -from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession -from sqlalchemy.orm import sessionmaker -from sqlalchemy.pool import NullPool - -load_dotenv(".env") - - -def get_postgres_uri(): - DB_HOST = os.getenv('POSTGRES_DB_HOST') - DB_NAME = os.getenv('POSTGRES_DB_NAME') - DB_USER = os.getenv('POSTGRES_DB_USER') - DB_PASS = os.getenv('POSTGRES_DB_PASS') - - # DB_URL = os.getenv('DATABASE_URL') - # print('db') - return f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}' - - -class PostgresORM: - - def __init__(self): - DATABASE_URL = get_postgres_uri() - # Initialize Async SQLAlchemy - engine = create_async_engine(DATABASE_URL, echo=False,poolclass=NullPool) - async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession) - self.session = async_session - - def get_instance(): - return PostgresORM() \ No newline at end of file diff --git a/shared_migrations/db/discord_bot.py b/shared_migrations/db/discord_bot.py deleted file mode 100644 index d9f5d13..0000000 --- a/shared_migrations/db/discord_bot.py +++ /dev/null @@ -1,280 +0,0 @@ -from sqlalchemy import create_engine,select,desc,update,delete -from .models import * -from sqlalchemy.ext.declarative import DeclarativeMeta - -class DiscordBotQueries: - - def convert_dict(self,data): - try: - if type(data) == list: - data = [val.to_dict() for val in data] - else: - return [data.to_dict()] - - return data - except Exception as e: - print(e) - raise Exception - - def getStatsStorage(self, fileName): - return self.client.storage.from_("c4gt-github-profile").download(fileName) - - - def logVCAction(self,user, action): - try: - new_log = VcLogs(discord_id=user.id, discord_name=user.name, option=action) - self.session.add(new_log) - self.session.commit() - return self.convert_dict(new_log) - except Exception as e: - self.session.rollback() - print("Error logging VC action:", e) - return None - - def getLeaderboard(self, id: int): - data = self.session.query(Leaderboard).where(Leaderboard.discord_id == id).all() - return self.convert_dict(data) - - - def read(self, table_class, query_key, query_value, columns=None): - try: - stmt = select(table_class) - stmt = stmt.where(getattr(table_class, query_key) == 
query_value) - - if columns: - stmt = stmt.with_only_columns(*(getattr(table_class, col) for col in columns)) - result = self.session.execute(stmt) - rows = result.fetchall() - column_names = [col.name for col in stmt.columns] - data = [dict(zip(column_names, row)) for row in rows] - return data - - result = self.session.execute(stmt) - return self.convert_dict(result.scalars().all()) - - except Exception as e: - print(f"Error reading data from table '{table_class}':", e) - return None - - def get_class_by_tablename(self,tablename): - try: - for cls in Base.registry._class_registry.values(): - if isinstance(cls, DeclarativeMeta): - if hasattr(cls, '__tablename__') and cls.__tablename__ == tablename: - return cls - return None - except Exception as e: - print(f"ERROR get_class_by_tablename - {e}") - return None - - def read_by_order_limit(self, table_class, query_key, query_value, order_column, order_by=False, limit=1, columns="*"): - try: - stmt = select(table_class) - stmt = stmt.where(getattr(table_class, query_key) == query_value) - if order_by: - stmt = stmt.order_by(desc(getattr(table_class, order_column))) - else: - stmt = stmt.order_by(getattr(table_class, order_column)) - - stmt = stmt.limit(limit) - if columns != "*": - stmt = stmt.with_only_columns(*(getattr(table_class, col) for col in columns)) - - result = self.session.execute(stmt) - results = result.fetchall() - - # Convert results to list of dictionaries - column_names = [col['name'] for col in result.keys()] - data = [dict(zip(column_names, row)) for row in results] - - return data - - except Exception as e: - print("Error reading data:", e) - return None - - async def read_all(self,table_class): - try: - table = self.get_class_by_tablename(table_class) - # Query all records from the specified table class - async with self.session() as session: - stmt = select(table) - result = await session.execute(stmt) - - data = result.scalars().all() - result = self.convert_dict(data) - return result - except Exception as e: - print(f"An error occurred -read_all_from_table : {e}") - return None - - def update(self, table_class, update_data, query_key, query_value): - try: - stmt = ( - update(table_class) - .where(getattr(table_class, query_key) == query_value) - .values(update_data) - .returning(*[getattr(table_class, col) for col in update_data.keys()]) # Return updated columns - ) - - result = self.session.execute(stmt) - self.session.commit() - updated_record = result.fetchone() - - if updated_record: - updated_record_dict = dict(zip(result.keys(), updated_record)) - return updated_record_dict - else: - return None - except Exception as e: - import pdb;pdb.set_trace() - print("Error updating record:", e) - return None - - - def insert(self, table, data): - try: - new_record = table(**data) - self.session.add(new_record) - self.session.commit() - return new_record.to_dict() - except Exception as e: - print("Error inserting data:", e) - self.session.rollback() # Rollback in case of error - return None - - - def memberIsAuthenticated(self, member: Member): - data = self.session.query(ContributorsRegistration).where(ContributorsRegistration.discord_id == member.id).all() - if data: - return True - else: - return False - - def addChapter(self, roleId: int, orgName: str, type: str): - try: - existing_record = self.session.query(Chapters).filter_by(discord_role_id=roleId).first() - - if existing_record: - existing_record.type = type - existing_record.org_name = orgName - else: - new_record = Chapters(discord_role_id=roleId, type=type, 
org_name=orgName) - self.session.add(new_record) - - self.session.commit() - return existing_record.to_dict() if existing_record else new_record.to_dict() - except Exception as e: - print("Error adding or updating chapter:", e) - return None - - - def deleteChapter(self,roleId: int): - try: - # Build the delete statement - stmt = delete(Chapters).where(Chapters.discord_role_id == roleId) - result = self.session.execute(stmt) - self.session.commit() - return True if result.rowcount else False - except Exception as e: - print("Error deleting chapter:", e) - return None - - async def updateContributor(self, contributor: Member, table_class=None): - try: - async with self.session() as session: - if table_class == None: - table_class = ContributorsDiscord - chapters = lookForRoles(contributor.roles)["chapter_roles"] - gender = lookForRoles(contributor.roles)["gender"] - - # Prepare the data to be upserted - update_data = { - "discord_id": contributor.id, - "discord_username": contributor.name, - "chapter": chapters[0] if chapters else None, - "gender": gender, - "joined_at": contributor.joined_at, - } - - stmt = select(ContributorsDiscord).where(ContributorsDiscord.discord_id == contributor.id) - result = await session.execute(stmt) - existing_record = result.scalars().first() - - if existing_record: - stmt = ( - update(table_class) - .where(table_class.discord_id == contributor.id) - .values(update_data) - ) - self.session.execute(stmt) - else: - new_record = table_class(**update_data) - self.session.add(new_record) - - # Commit the transaction - self.session.commit() - return True - except Exception as e: - print("Error updating contributor:", e) - return False - - - def updateContributors(self, contributors: [Member], table_class): - try: - for contributor in contributors: - chapters = lookForRoles(contributor.roles)["chapter_roles"] - gender = lookForRoles(contributor.roles)["gender"] - update_data = { - "discord_id": contributor.id, - "discord_username": contributor.name, - "chapter": chapters[0] if chapters else None, - "gender": gender, - "joined_at": contributor.joined_at, - } - existing_record = self.session.query(table_class).filter_by(discord_id=contributor.id).first() - - if existing_record: - stmt = ( - update(table_class) - .where(table_class.discord_id == contributor.id) - .values(update_data) - ) - self.session.execute(stmt) - else: - new_record = table_class(**update_data) - self.session.add(new_record) - - self.session.commit() - return True - except Exception as e: - print("Error updating contributors:", e) - return False - - - def deleteContributorDiscord(self, contributorDiscordIds, table_class=None): - try: - if table_class == None: - table_class = ContributorsDiscord - stmt = delete(table_class).where(table_class.discord_id.in_(contributorDiscordIds)) - self.session.execute(stmt) - self.session.commit() - - return True - except Exception as e: - print("Error deleting contributors:", e) - self.session.rollback() - return False - - - - def read_all_active(self, table): - if table == "contributors_discord": - table = ContributorsDiscord - data = self.session.query(table).where(table.is_active == True).all() - return self.convert_dict(data) - - def invalidateContributorDiscord(self, contributorDiscordIds): - table = "contributors_discord" - for id in contributorDiscordIds: - self.client.table(table).update({'is_active': 'false'}).eq('discord_id', id).execute() diff --git a/shared_migrations/db/dmp_api.py b/shared_migrations/db/dmp_api.py deleted file mode 100644 index 
7528a8b..0000000 --- a/shared_migrations/db/dmp_api.py +++ /dev/null @@ -1,103 +0,0 @@ -from .models import * -from sqlalchemy import func -from sqlalchemy.future import select - - -class DmpAPIQueries: - - async def get_issue_query(async_session): - try: - async with async_session() as session: - results = await session.execute( - select( - DmpOrgs.id.label('org_id'), - DmpOrgs.name.label('org_name'), - func.json_agg( - func.json_build_object( - 'id', DmpIssues.id, - 'name', DmpIssues.title - ) - ).label('issues') - ) - .outerjoin(DmpIssues, DmpOrgs.id == DmpIssues.org_id) - .group_by(DmpOrgs.id) - .order_by(DmpOrgs.id) - ) - - # Extract results as a list of dictionaries if needed - data = results.all() - - return data - except Exception as e: - print(f"An error occurred: get_column_value {e}") - return None - - - async def get_issue_owner(async_session, name): - try: - async with async_session() as session: - response = await session.execute( - select(DmpOrgs).filter_by(name=name) - ) - results = response.scalars().all() - return results - except Exception as e: - print(f"An error occurred: get_column_value {e}") - return None - - async def get_actual_owner_query(async_session, owner): - try: - async with async_session() as session: - response = await session.execute( - select(DmpIssues).filter(DmpIssues.repo_owner.like(f'%{owner}%')) - ) - results = response.scalars().all() # Fetch all matching rows as objects - results = [val.to_dict() for val in results] # Convert objects to dicts - return results - except Exception as e: - print(f"An error occurred: get_column_value {e}") - return None - - - async def get_dmp_issues(async_session, issue_id): - try: - async with async_session() as session: - response = await session.execute( - select(DmpIssues).filter_by(id=int(issue_id)) - ) - results = response.scalars().all() # Fetch all matching rows as objects - results = [val.to_dict() for val in results] # Convert objects to dicts - return results - except Exception as e: - print(f"An error occurred: get_column_value {e}") - return None - - - async def get_dmp_issue_updates(async_session, dmp_issue_id): - try: - async with async_session() as session: - response = await session.execute( - select(DmpIssueUpdates).filter_by(dmp_id=int(dmp_issue_id)) - ) - results = response.scalars().all() # Fetch all matching rows as objects - results = [val.to_dict() for val in results] # Convert objects to dicts - return results - except Exception as e: - print(f"An error occurred: get_column_value {e}") - return None - - - async def get_pr_data(async_session, dmp_issue_id): - try: - async with async_session() as session: - response = await session.execute( - select(DmpPrUpdates).filter_by(dmp_id=int(dmp_issue_id)) - ) - pr_updates = response.scalars().all() # Fetch all matching rows as objects - pr_updates_dict = [pr_update.to_dict() for pr_update in pr_updates] # Convert objects to dicts - return pr_updates_dict - except Exception as e: - print(f"An error occurred: get_column_value {e}") - return None - - \ No newline at end of file diff --git a/shared_migrations/db/dmp_cron.py b/shared_migrations/db/dmp_cron.py deleted file mode 100644 index 26e6d0e..0000000 --- a/shared_migrations/db/dmp_cron.py +++ /dev/null @@ -1,235 +0,0 @@ -from sqlalchemy.future import select -from .models import * -from sqlalchemy import update -from sqlalchemy.dialects.postgresql import insert -from datetime import datetime -from sqlalchemy.orm import aliased -from sqlalchemy.exc import NoResultFound - - -class DmpCronQueries: - - 
async def get_timestamp(async_session, model, col_name: str, col: str, value): - try: - # Construct the ORM query - query = select(getattr(model, col_name)).filter(getattr(model, col) == value) - - # Execute the query and fetch the result - async with async_session() as session: - result = await session.execute(query) - return result.scalar() - - except NoResultFound: - return None - except Exception as e: - print(f"An error occurred: get_column_value {e}") - return None - - async def get_all_dmp_issues(async_session): - try: - async with async_session() as session: - # Alias for the DmpOrg table to use in the JSON_BUILD_OBJECT - dmp_org_alias = aliased(DmpOrgs) - - # Build the query - query = ( - select( - DmpIssues, - func.json_build_object( - 'created_at', dmp_org_alias.created_at, - 'description', dmp_org_alias.description, - 'id', dmp_org_alias.id, - 'link', dmp_org_alias.link, - 'name', dmp_org_alias.name, - 'repo_owner', dmp_org_alias.repo_owner - ).label('dmp_orgs') - ) - .outerjoin(dmp_org_alias, DmpIssues.org_id == dmp_org_alias.id) - .filter(DmpIssues.org_id.isnot(None)) - .order_by(DmpIssues.id) - ) - - # Execute the query and fetch results - result = await session.execute(query) - rows = result.fetchall() - - # Convert results to dictionaries - data = [] - for row in rows: - issue_dict = row._asdict() # Convert row to dict - dmp_orgs = issue_dict.pop('dmp_orgs') # Extract JSON object from row - issue_dict['dmp_orgs'] = dmp_orgs - issue_dict.update(issue_dict['DmpIssues'].to_dict()) - # Add JSON object back to dict - del issue_dict['DmpIssues'] - data.append(issue_dict) - - return data - - except Exception as e: - print(e) - raise Exception - - async def update_dmp_issue(async_session,issue_id: int, update_data: dict): - try: - async with async_session() as session: - async with session.begin(): - # Build the update query - query = ( - update(DmpIssues) - .where(DmpIssues.id == issue_id) - .values(**update_data) - ) - - # Execute the query - await session.execute(query) - await session.commit() - return True - - except Exception as e: - return False - - - async def upsert_data_orm(async_session, update_data): - try: - - async with async_session() as session: - async with session.begin(): - - # Define the insert statement - stmt = insert(DmpIssueUpdates).values(**update_data) - - # Define the update statement in case of conflict - stmt = stmt.on_conflict_do_update( - index_elements=['comment_id'], - set_={ - 'body_text': stmt.excluded.body_text, - 'comment_link': stmt.excluded.comment_link, - 'comment_api': stmt.excluded.comment_api, - 'comment_updated_at': stmt.excluded.comment_updated_at, - 'dmp_id': stmt.excluded.dmp_id, - 'created_by': stmt.excluded.created_by, - 'created_at': stmt.excluded.created_at - } - ) - - # Execute the statement - await session.execute(stmt) - await session.commit() - - return True - - except Exception as e: - print(e) - return False - - - - async def upsert_pr_update(async_session, pr_update_data): - try: - async with async_session() as session: - async with session.begin(): - pr_update_data['pr_updated_at'] = datetime.fromisoformat(pr_update_data['pr_updated_at']).replace(tzinfo=None) if pr_update_data['pr_updated_at'] else None - pr_update_data['merged_at'] = datetime.fromisoformat(pr_update_data['merged_at']).replace(tzinfo=None) if pr_update_data['merged_at'] else None - pr_update_data['closed_at'] = datetime.fromisoformat(pr_update_data['closed_at']).replace(tzinfo=None) if pr_update_data['closed_at'] else None - - # Prepare the insert 
statement - stmt = insert(DmpPrUpdates).values(**pr_update_data) - - # Prepare the conflict resolution strategy - stmt = stmt.on_conflict_do_update( - index_elements=['pr_id'], # Assuming `pr_id` is the unique key - set_={ - 'status': stmt.excluded.status, - 'merged_at': stmt.excluded.merged_at, - 'closed_at': stmt.excluded.closed_at, - 'pr_updated_at': stmt.excluded.pr_updated_at, - 'dmp_id': stmt.excluded.dmp_id, - 'created_at': stmt.excluded.created_at, - 'title': stmt.excluded.title, - 'link': stmt.excluded.link - } - ) - # Execute and commit the transaction - await session.execute(stmt) - await session.commit() - - return True - - except Exception as e: - print(e) - return False - - - - async def update_dmp_week_update(async_session, update_data): - try: - async with async_session() as session: - async with session.begin(): - # Define the filter conditions - stmt = ( - select(DmpWeekUpdates) - .where( - DmpWeekUpdates.week == update_data['week'], - DmpWeekUpdates.dmp_id == update_data['dmp_id'] - ) - ) - - # Fetch the row that needs to be updated - result = await session.execute(stmt) - dmp_week_update = result.scalars().first() - - if dmp_week_update: - # Update the fields with the values from update_data - for key, value in update_data.items(): - setattr(dmp_week_update, key, value) - - # Commit the changes - await session.commit() - return True - except Exception as e: - print(e) - return False - - - - async def get_week_updates(async_session, dmp_id, week): - try: - async with async_session() as session: - # Build the ORM query - stmt = select(DmpWeekUpdates).where( - DmpWeekUpdates.dmp_id == dmp_id, - DmpWeekUpdates.week == week - ) - # Execute the query - result = await session.execute(stmt) - - # Fetch all matching rows - week_updates = result.scalars().all() - - - return True if len(week_updates)>0 else False - - except Exception as e: - return False - - - - async def insert_dmp_week_update(async_session, update_data): - try: - async with async_session() as session: - async with session.begin(): - # Define the insert statement - stmt = insert(DmpWeekUpdates).values(**update_data) - - # Execute the statement - await session.execute(stmt) - await session.commit() - - return True - - except Exception as e: - print(e) - return False - - diff --git a/shared_migrations/db/models.py b/shared_migrations/db/models.py deleted file mode 100644 index bd05abe..0000000 --- a/shared_migrations/db/models.py +++ /dev/null @@ -1,1586 +0,0 @@ -from datetime import datetime -from sqlalchemy.orm import relationship -from sqlalchemy import UUID, Boolean, Float, MetaData, Column, Integer, SmallInteger, String, Text, DateTime, ForeignKey, BigInteger, TypeDecorator, UniqueConstraint, func -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.types import TypeDecorator, DateTime as SA_DateTime - -Base = declarative_base() -# Shared metadata object -shared_metadata = MetaData() - -class DateTime(TypeDecorator): - impl = SA_DateTime - - def process_bind_param(self, value, dialect): - if isinstance(value, str): - try: - # Convert string to datetime - return datetime.fromisoformat(value) - except ValueError: - # If conversion fails, return None - return None - return value - - def process_result_value(self, value, dialect): - return value - - -class AppComments(Base): - __tablename__ = 'app_comments' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - updated_at = Column(DateTime, nullable=True) - api_url = Column(Text, nullable=True) - comment_id = Column(BigInteger, 
nullable=True) - issue_id = Column(BigInteger, unique=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': str(self.id), - 'updated_at': self.updated_at, - 'api_url': self.api_url, - 'comment_id': self.comment_id, - 'issue_id': self.issue_id - } - -class Badges(Base): - __tablename__ = 'badges' - id = Column(UUID(as_uuid=True), primary_key=True) - image = Column(Text, nullable=True) - text = Column(Text, nullable=True) - description = Column(Text, nullable=True) - created_at = Column(DateTime, nullable=True) - updated_at = Column(DateTime, nullable=True) - - user_badges = relationship('UserBadges', back_populates='badge') - - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'image': self.image, - 'text': self.text, - 'description': self.description, - 'created_at': self.created_at, - 'updated_at': self.updated_at - } - -class CcbpTickets(Base): - __tablename__ = 'ccbp_tickets' - __table_args__ = {'comment': 'A table to store details of CCBP Tickets from various projects'} - - created_at = Column(DateTime, nullable=True) - name = Column(Text, nullable=True) - product = Column(Text, nullable=True) - complexity = Column(Text, nullable=True) - project_category = Column(Text, nullable=True) - project_sub_category = Column(Text, nullable=True) - reqd_skills = Column(Text, nullable=True) - issue_id = Column(BigInteger, unique=True) - api_endpoint_url = Column(Text, unique=True, nullable=True) - url = Column(Text, unique=True, nullable=True) - ticket_points = Column(SmallInteger, nullable=True, comment='How many points the ticket is worth') - index = Column(SmallInteger, unique=True, autoincrement=True) - mentors = Column(Text, nullable=True) - uuid = Column(UUID(as_uuid=True), primary_key=True) - status = Column(Text, nullable=True) - community_label = Column(Boolean, nullable=True, comment='has community label') - organization = Column(Text, nullable=True) - closed_at = Column(DateTime, nullable=True, comment='date-time at which issue was closed') - assignees = Column(Text, nullable=True) - issue_author = Column(Text, nullable=True) - is_assigned = Column(Boolean, nullable=False) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'created_at': self.created_at, - 'name': self.name, - 'product': self.product, - 'complexity': self.complexity, - 'project_category': self.project_category, - 'project_sub_category': self.project_sub_category, - 'reqd_skills': self.reqd_skills, - 'issue_id': self.issue_id, - 'api_endpoint_url': self.api_endpoint_url, - 'url': self.url, - 'ticket_points': self.ticket_points, - 'index': self.index, - 'mentors': self.mentors, - 'uuid': str(self.uuid), - 'status': self.status, - 'community_label': self.community_label, - 'organization': self.organization, - 'closed_at': self.closed_at, - 'assignees': self.assignees, - 'issue_author': self.issue_author, - 'is_assigned': self.is_assigned - } - -class Chapters(Base): - __tablename__ = 'chapters' - - id = Column(UUID(as_uuid=True), primary_key=True) - type = Column(Text, nullable=True) - org_name = Column(Text, unique=True) - primary_organisation = Column(Text, nullable=True, comment='the organisation that the chapter is mapped to') - sessions = Column(Integer, nullable=True) - discord_role_id = Column(BigInteger, unique=True, comment='db id of the corresponding member role in discord server') - created_at = Column(DateTime, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'type': self.type, - 
'org_name': self.org_name, - 'primary_organisation': self.primary_organisation, - 'sessions': self.sessions, - 'discord_role_id': self.discord_role_id, - 'created_at': self.created_at - } - - -## - -class ConnectedPrs(Base): - __tablename__ = 'connected_prs' - - id = Column(UUID(as_uuid=True), primary_key=True) - created_at = Column(DateTime, nullable=True) - api_url = Column(Text, nullable=True) - html_url = Column(Text, unique=True, nullable=True) - raised_by = Column(BigInteger, nullable=True) - raised_at = Column(DateTime, nullable=False) - raised_by_username = Column(Text, nullable=False) - status = Column(Text, nullable=True) - is_merged = Column(Boolean, nullable=True) - merged_by = Column(BigInteger, nullable=True) - merged_at = Column(Text, nullable=True) - merged_by_username = Column(Text, nullable=True) - pr_id = Column(BigInteger, nullable=False, comment='github id of the pr') - points = Column(SmallInteger, nullable=False) - ticket_url = Column(Text, nullable=False) - ticket_complexity = Column(Text, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'created_at': self.created_at, - 'api_url': self.api_url, - 'html_url': self.html_url, - 'raised_by': self.raised_by, - 'raised_at': self.raised_at, - 'raised_by_username': self.raised_by_username, - 'status': self.status, - 'is_merged': self.is_merged, - 'merged_by': self.merged_by, - 'merged_at': self.merged_at, - 'merged_by_username': self.merged_by_username, - 'pr_id': self.pr_id, - 'points': self.points, - 'ticket_url': self.ticket_url, - 'ticket_complexity': self.ticket_complexity - } - -class ContributorNames(Base): - __tablename__ = 'contributor_names' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - discord_id = Column(BigInteger, nullable=False) - name = Column(Text, nullable=True) - country = Column(Text, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'discord_id': self.discord_id, - 'name': self.name, - 'country': self.country - } - -class ContributorsDiscord(Base): - __tablename__ = 'contributors_discord' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - discord_id = Column(BigInteger, unique=True, nullable=False) - github_id = Column(BigInteger, nullable=True) - github_url = Column(String, nullable=True) - discord_username = Column(String, nullable=True) - joined_at = Column(DateTime, nullable=False) - email = Column(Text, nullable=True) - field_name = Column(Text, nullable=True, name='name') # Adjusted field name - chapter = Column(Text, nullable=True, comment="the chapter they're associated with") - gender = Column(Text, nullable=True) - is_active = Column(Boolean, nullable=False) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'discord_id': self.discord_id, - 'github_id': self.github_id, - 'github_url': self.github_url, - 'discord_username': self.discord_username, - 'joined_at': self.joined_at, - 'email': self.email, - 'name': self.field_name, - 'chapter': self.chapter, - 'gender': self.gender, - 'is_active': self.is_active - } - -class ContributorsRegistration(Base): - __tablename__ = 'contributors_registration' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - discord_id = Column(BigInteger, unique=True, nullable=False) - github_id = Column(BigInteger, unique=True, nullable=False) - github_url = Column(String, nullable=False) - discord_username = Column(String, nullable=True) - joined_at = Column(DateTime, 
nullable=False) - email = Column(Text, nullable=True) - name = Column(Text, nullable=True) - - point_transactions = relationship('PointTransactions', back_populates='contributor') - - user_activities = relationship('UserActivity', back_populates='contributor') - user_points_mappings = relationship('UserPointsMapping', back_populates='contributors') - - - def __repr__(self): - return f"" - - - def to_dict(self): - return { - 'id': self.id, - 'discord_id': self.discord_id, - 'github_id': self.github_id, - 'github_url': self.github_url, - 'discord_username': self.discord_username, - 'joined_at': self.joined_at, - 'email': self.email, - 'name': self.name - } - -class DiscordChannels(Base): - __tablename__ = 'discord_channels' - - channel_id = Column(BigInteger, primary_key=True) - channel_name = Column(Text, nullable=True) - webhook = Column(Text, nullable=True) - should_notify = Column(Boolean, nullable=False) - - products = relationship('Product', back_populates='channel') - - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'channel_id': self.channel_id, - 'channel_name': self.channel_name, - 'webhook': self.webhook, - 'should_notify': self.should_notify - } - -class DiscordEngagement(Base): - __tablename__ = 'discord_engagement' - __table_args__ = {'comment': 'engagement metrics for contributors'} - - id = Column(BigInteger, primary_key=True, autoincrement=True) - created_at = Column(DateTime, nullable=True) - contributor = Column(BigInteger, unique=True, nullable=False) - has_introduced = Column(Boolean, nullable=True) - total_message_count = Column(BigInteger, nullable=True) - total_reaction_count = Column(BigInteger, nullable=True) - converserbadge = Column(Boolean, nullable=True) - apprenticebadge = Column(Boolean, nullable=True) - rockstarbadge = Column(Boolean, nullable=True) - enthusiastbadge = Column(Boolean, nullable=True) - risingstarbadge = Column(Boolean, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'created_at': self.created_at, - 'contributor': self.contributor, - 'has_introduced': self.has_introduced, - 'total_message_count': self.total_message_count, - 'total_reaction_count': self.total_reaction_count, - 'converserbadge': self.converserbadge, - 'apprenticebadge': self.apprenticebadge, - 'rockstarbadge': self.rockstarbadge, - 'enthusiastbadge': self.enthusiastbadge, - 'risingstarbadge': self.risingstarbadge - } - -class DmpIssueUpdates(Base): - __tablename__ = 'dmp_issue_updates' - __table_args__ = {'comment': 'Having records of dmp with issue details'} - - created_at = Column(DateTime, nullable=False) - body_text = Column(Text, nullable=True) - comment_link = Column(Text, nullable=True) - comment_id = Column(BigInteger, primary_key=True) - comment_api = Column(String, nullable=True) - comment_updated_at = Column(DateTime, nullable=True) - dmp_id = Column(BigInteger, ForeignKey('dmp_issues.id'), nullable=False) - created_by = Column(Text, nullable=False) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'created_at': self.created_at, - 'body_text': self.body_text, - 'comment_link': self.comment_link, - 'comment_id': self.comment_id, - 'comment_api': self.comment_api, - 'comment_updated_at': self.comment_updated_at, - 'dmp_id': self.dmp_id, - 'created_by': self.created_by - } - - -class DmpIssues(Base): - __tablename__ = 'dmp_issues' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - issue_url = Column(String, nullable=False) - issue_number = 
Column(BigInteger, nullable=False) - mentor_username = Column(Text, nullable=True) - contributor_username = Column(Text, nullable=True) - title = Column(Text, nullable=False) - org_id = Column(BigInteger, ForeignKey('dmp_orgs.id'), nullable=False) - description = Column(Text, nullable=False) - repo = Column(Text, nullable=False) - repo_owner = Column(Text, nullable=False) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'issue_url': self.issue_url, - 'issue_number': self.issue_number, - 'mentor_username': self.mentor_username, - 'contributor_username': self.contributor_username, - 'title': self.title, - 'org_id': self.org_id, - 'description': self.description, - 'repo': self.repo, - 'repo_owner': self.repo_owner - } - -class DmpOrgs(Base): - __tablename__ = 'dmp_orgs' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - created_at = Column(DateTime, nullable=False) - name = Column(Text, nullable=False) - description = Column(Text, nullable=False) - link = Column(Text, nullable=False) - repo_owner = Column(Text, nullable=False) - - # issues = relationship('Issues', backref='organization', lazy='joined') - - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'created_at': self.created_at, - 'name': self.name, - 'description': self.description, - 'link': self.link, - 'repo_owner': self.repo_owner - } - -class DmpPrUpdates(Base): - __tablename__ = 'dmp_pr_updates' - __table_args__ = {'comment': 'Having PR related records'} - - created_at = Column(DateTime, nullable=False) - pr_id = Column(BigInteger, primary_key=True) - status = Column(String, nullable=False) - title = Column(Text, nullable=False) - pr_updated_at = Column(DateTime, nullable=True) - merged_at = Column(DateTime, nullable=True) - closed_at = Column(DateTime, nullable=True) - dmp_id = Column(BigInteger, ForeignKey('dmp_issues.id'), nullable=False) - link = Column(Text, nullable=False) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'created_at': self.created_at, - 'pr_id': self.pr_id, - 'status': self.status, - 'title': self.title, - 'pr_updated_at': self.pr_updated_at, - 'merged_at': self.merged_at, - 'closed_at': self.closed_at, - 'dmp_id': self.dmp_id, - 'link': self.link - } - -class DmpTickets(Base): - __tablename__ = 'dmp_tickets' - - created_at = Column(DateTime, nullable=True) - name = Column(Text, nullable=True) - product = Column(Text, nullable=True) - complexity = Column(Text, nullable=True) - project_category = Column(Text, nullable=True) - project_sub_category = Column(Text, nullable=True) - reqd_skills = Column(Text, nullable=True) - issue_id = Column(BigInteger, unique=True, nullable=False) - api_endpoint_url = Column(Text, unique=True, nullable=True) - url = Column(Text, unique=True, nullable=True) - ticket_points = Column(Integer, nullable=True, comment='How many points the ticket is worth') - index = Column(Integer, unique=True, autoincrement=True) - mentors = Column(Text, nullable=True) - uuid = Column(UUID(as_uuid=True), primary_key=True) - status = Column(Text, nullable=True) - community_label = Column(Boolean, nullable=True, comment='has community label') - organization = Column(Text, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'created_at': self.created_at, - 'name': self.name, - 'product': self.product, - 'complexity': self.complexity, - 'project_category': self.project_category, - 'project_sub_category': self.project_sub_category, - 'reqd_skills': 
self.reqd_skills, - 'issue_id': self.issue_id, - 'api_endpoint_url': self.api_endpoint_url, - 'url': self.url, - 'ticket_points': self.ticket_points, - 'index': self.index, - 'mentors': self.mentors, - 'uuid': self.uuid, - 'status': self.status, - 'community_label': self.community_label, - 'organization': self.organization - } - -class DmpWeekUpdates(Base): - __tablename__ = 'dmp_week_updates' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - issue_url = Column(Text, nullable=False) - week = Column(BigInteger, nullable=True) - total_task = Column(BigInteger, nullable=True) - completed_task = Column(BigInteger, nullable=True) - progress = Column(Float, nullable=True) - task_data = Column(Text, nullable=True) - dmp_id = Column(BigInteger, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'issue_url': self.issue_url, - 'week': self.week, - 'total_task': self.total_task, - 'completed_task': self.completed_task, - 'progress': self.progress, - 'task_data': self.task_data, - 'dmp_id': self.dmp_id - } - -class GithubClassroomData(Base): - __tablename__ = 'github_classroom_data' - __table_args__ = {'comment': 'Table for saving the details about github classroom assignment data'} - - id = Column(BigInteger, primary_key=True, autoincrement=True) - created_at = Column(DateTime, nullable=False) - assignment_name = Column(Text, nullable=False) - assignment_url = Column(Text, nullable=False) - assignment_id = Column(Text, nullable=True) - starter_code_url = Column(Text, nullable=False) - github_username = Column(Text, nullable=True) - roster_identifier = Column(Text, nullable=True) - student_repository_name = Column(Text, nullable=True) - student_repository_url = Column(Text, nullable=True) - submission_timestamp = Column(DateTime, nullable=False) - points_awarded = Column(Integer, nullable=True) - points_available = Column(Integer, nullable=True) - c4gt_points = Column(Integer, nullable=True) - discord_id = Column(Text, nullable=True) - updated_at = Column(DateTime, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'created_at': self.created_at, - 'assignment_name': self.assignment_name, - 'assignment_url': self.assignment_url, - 'assignment_id': self.assignment_id, - 'starter_code_url': self.starter_code_url, - 'github_username': self.github_username, - 'roster_identifier': self.roster_identifier, - 'student_repository_name': self.student_repository_name, - 'student_repository_url': self.student_repository_url, - 'submission_timestamp': self.submission_timestamp, - 'points_awarded': self.points_awarded, - 'points_available': self.points_available, - 'c4gt_points': self.c4gt_points, - 'discord_id': self.discord_id, - 'updated_at': self.updated_at - } - -class GithubInstallations(Base): - __tablename__ = 'github_installations' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - github_organisation = Column(Text, unique=True, nullable=False) - installation_id = Column(BigInteger, unique=True, nullable=False) - target_type = Column(Text, nullable=True, comment='Type of github entity that installed the app, usually "Organisation"') - github_ids = Column(Text, nullable=True, comment="Identifiers on the github database, prolly won't be used") - permissions_and_events = Column(Text, nullable=True) - created_at = Column(DateTime, nullable=True) - organisation = Column(Text, ForeignKey('community_orgs.name'), nullable=True) - - def __repr__(self): - return f"" - - def 
to_dict(self): - return { - 'id': self.id, - 'github_organisation': self.github_organisation, - 'installation_id': self.installation_id, - 'target_type': self.target_type, - 'github_ids': self.github_ids, - 'permissions_and_events': self.permissions_and_events, - 'created_at': self.created_at, - 'organisation': self.organisation - } -## - -class GithubOrganisationsToOrganisations(Base): - __tablename__ = 'github_organisations_to_organisations' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - github_organisation = Column(Text, nullable=False) - organisation = Column(Text, nullable=True) - created_at = Column(DateTime, nullable=True, comment='Creation date of organization ticket') - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'github_organisation': self.github_organisation, - 'organisation': self.organisation, - 'created_at': self.created_at - } - -class IssueContributors(Base): - __tablename__ = 'issue_contributors' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - contributor_id = Column(BigInteger, ForeignKey('contributors_registration.id')) - issue_id = Column(BigInteger, ForeignKey('issues.id'), primary_key=True) - role = Column(BigInteger, ForeignKey('role_master.id'), nullable=True) - created_at = Column(DateTime, nullable=True) - updated_at = Column(DateTime, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'contributor_id': self.contributor_id, - 'issue_id': self.issue_id, - 'role_id': self.role, - 'created_at': self.created_at, - 'updated_at': self.updated_at - } - -class IssueMentors(Base): - __tablename__ = 'issue_mentors' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - issue_id = Column(BigInteger, ForeignKey('issues.id')) - org_mentor_id = Column(Text, nullable=True) - angel_mentor_id = Column(BigInteger, ForeignKey('contributors_registration.id')) - created_at = Column(DateTime, nullable=True) - updated_at = Column(DateTime, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'issue_id': self.issue_id, - 'org_mentor_id': self.org_mentor_id, - 'angel_mentor_id': self.angel_mentor_id, - 'created_at': self.created_at, - 'updated_at': self.updated_at - } - -class Issues(Base): - __tablename__ = 'issues' - - id = Column(BigInteger, primary_key=True) - link = Column(Text, nullable=False) - labels = Column(Text, nullable=True) - project_type = Column(Text, nullable=True) - complexity = Column(Text, nullable=True) - skills = Column(Text, nullable=True) - technology = Column(Text, nullable=True) - status = Column(Text, nullable=True) - created_at = Column(DateTime, nullable=True) - updated_at = Column(DateTime, nullable=True) - title = Column(Text, nullable=True) - domain = Column(Text, nullable=True) - description = Column(Text, nullable=True) - org_id = Column(BigInteger, ForeignKey('community_orgs.id'), nullable=True) - issue_id = Column(BigInteger, unique=True) - - point_transactions = relationship('PointTransactions', back_populates='issue') - user_activities = relationship('UserActivity', back_populates='issue') - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'link': self.link, - 'labels': self.labels, - 'complexity': self.complexity, - 'skills': self.skills, - 'technology': self.technology, - 'status': self.status, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'title': self.title, - 'description':
self.description, - 'org_id': self.org_id, - 'issue_id': self.issue_id, - 'project_type':self.project_type, - 'domain': self.domain - } - -class MentorDetails(Base): - __tablename__ = 'mentor_details' - - id = Column(BigInteger, primary_key=True) - name = Column(String(255), nullable=True) - email = Column(String(255), nullable=True) - discord_id = Column(String(255), nullable=True) - discord_username = Column(String(255), nullable=True) - github_id = Column(String(255), nullable=True) - created_at = Column(DateTime, nullable=True) - updated_at = Column(DateTime, nullable=True) - - point_transactions = relationship('PointTransactions', back_populates='mentor') - user_activities = relationship('UserActivity', back_populates='mentor') - user_points_mappings = relationship('UserPointsMapping', back_populates='mentor') - - - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'name': self.name, - 'email': self.email, - 'discord_id': self.discord_id, - 'discord_username': self.discord_username, - 'github_id': self.github_id, - 'created_at': self.created_at, - 'updated_at': self.updated_at - } - -class MentorshipProgramSiteStructure(Base): - __tablename__ = 'mentorship_program_site_structure' - - id = Column(BigInteger, primary_key=True) - product_id = Column(BigInteger, ForeignKey('product.id'), nullable=True) - project_id = Column(BigInteger, nullable=True) - contributor_id = Column(BigInteger, nullable=True) - website_directory_label = Column(Text, nullable=True) - directory_url = Column(Text, nullable=True) - - # project = relationship('MentorshipProgramProjects', back_populates='site_structures') - # contributor = relationship('MentorshipProgramSelectedContributors', back_populates='site_structures') - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'product_id': self.product_id, - 'project_id': self.project_id, - 'contributor_id': self.contributor_id, - 'website_directory_label': self.website_directory_label, - 'directory_url': self.directory_url - } - -class MentorshipProgramWebsiteComments(Base): - __tablename__ = 'mentorship_program_website_comments' - - comment_id = Column(BigInteger, primary_key=True) - url = Column(Text, nullable=True) - html_url = Column(Text, nullable=True) - commented_by_username = Column(Text, nullable=True) - commented_by_id = Column(BigInteger, nullable=True) - created_at = Column(DateTime, nullable=True) - updated_at = Column(DateTime, nullable=True) - body = Column(Text, nullable=True) - pr_id = Column(BigInteger, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'comment_id': self.comment_id, - 'url': self.url, - 'html_url': self.html_url, - 'commented_by_username': self.commented_by_username, - 'commented_by_id': self.commented_by_id, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'body': self.body, - 'pr_id': self.pr_id - } - -class MentorshipProgramWebsiteCommits(Base): - __tablename__ = 'mentorship_program_website_commits' - - node_id = Column(Text, primary_key=True) - url = Column(Text, nullable=True) - html_url = Column(Text, nullable=True) - comment_count = Column(Integer, nullable=True) - date = Column(DateTime, nullable=True) - author_id = Column(BigInteger, nullable=True) - author_username = Column(Text, nullable=True) - author_email = Column(Text, nullable=True) - committer_id = Column(BigInteger, nullable=True) - committer_username = Column(Text, nullable=True) - committer_email = Column(Text, nullable=True) - 
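- # Diff statistics recorded per commit (lines added/removed and the files touched)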
additions = Column(Integer, nullable=True) - deletions = Column(Integer, nullable=True) - files = Column(Text, nullable=True) - project_folder_name = Column(Text, nullable=True) - pr_id = Column(BigInteger, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'node_id': self.node_id, - 'url': self.url, - 'html_url': self.html_url, - 'comment_count': self.comment_count, - 'date': self.date, - 'author_id': self.author_id, - 'author_username': self.author_username, - 'author_email': self.author_email, - 'committer_id': self.committer_id, - 'committer_username': self.committer_username, - 'committer_email': self.committer_email, - 'additions': self.additions, - 'deletions': self.deletions, - 'files': self.files, - 'project_folder_name': self.project_folder_name, - 'pr_id': self.pr_id - } - -class MentorshipProgramWebsiteHasUpdated(Base): - __tablename__ = 'mentorship_program_website_has_updated' - - id = Column(BigInteger, primary_key=True) - project_id = Column(BigInteger, nullable=True) - week1_update_date = Column(DateTime, nullable=True) - week2_update_date = Column(DateTime, nullable=True) - week3_update_date = Column(DateTime, nullable=True) - week4_update_date = Column(DateTime, nullable=True) - week5_update_date = Column(DateTime, nullable=True) - week6_update_date = Column(DateTime, nullable=True) - week7_update_date = Column(DateTime, nullable=True) - week8_update_date = Column(DateTime, nullable=True) - week9_update_date = Column(DateTime, nullable=True) - week1_is_default_text = Column(Boolean, nullable=True) - week2_is_default_text = Column(Boolean, nullable=True) - week3_is_default_text = Column(Boolean, nullable=True) - week4_is_default_text = Column(Boolean, nullable=True) - week5_is_default_text = Column(Boolean, nullable=True) - week6_is_default_text = Column(Boolean, nullable=True) - week7_is_default_text = Column(Boolean, nullable=True) - week8_is_default_text = Column(Boolean, nullable=True) - week9_is_default_text = Column(Boolean, nullable=True) - product = Column(Text, nullable=True) - project_folder = Column(Text, unique=True, nullable=False) - all_links = Column(Text, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'project_id': self.project_id, - 'week1_update_date': self.week1_update_date, - 'week2_update_date': self.week2_update_date, - 'week3_update_date': self.week3_update_date, - 'week4_update_date': self.week4_update_date, - 'week5_update_date': self.week5_update_date, - 'week6_update_date': self.week6_update_date, - 'week7_update_date': self.week7_update_date, - 'week8_update_date': self.week8_update_date, - 'week9_update_date': self.week9_update_date, - 'week1_is_default_text': self.week1_is_default_text, - 'week2_is_default_text': self.week2_is_default_text, - 'week3_is_default_text': self.week3_is_default_text, - 'week4_is_default_text': self.week4_is_default_text, - 'week5_is_default_text': self.week5_is_default_text, - 'week6_is_default_text': self.week6_is_default_text, - 'week7_is_default_text': self.week7_is_default_text, - 'week8_is_default_text': self.week8_is_default_text, - 'week9_is_default_text': self.week9_is_default_text, - 'product': self.product, - 'project_folder': self.project_folder, - 'all_links': self.all_links - } - - - -## - -class MentorshipProgramWebsitePullRequest(Base): - __tablename__ = 'mentorship_program_website_pull_request' - - pr_url = Column(Text, nullable=True) - pr_id = Column(BigInteger, primary_key=True) - pr_node_id = 
Column(Text, unique=True, nullable=True) - html_url = Column(Text, nullable=True) - status = Column(Text, nullable=True) - title = Column(Text, nullable=True) - raised_by_username = Column(Text, nullable=True) - raised_by_id = Column(Integer, nullable=True) - body = Column(Text, nullable=True) - created_at = Column(DateTime, nullable=True) - updated_at = Column(DateTime, nullable=True) - closed_at = Column(DateTime, nullable=True) - merged_at = Column(DateTime, nullable=True) - assignees = Column(Text, nullable=True) - requested_reviewers = Column(Text, nullable=True) - labels = Column(Text, nullable=True) - review_comments_url = Column(Text, nullable=True) - comments_url = Column(Text, nullable=True) - repository_id = Column(Integer, nullable=True) - repository_owner_name = Column(Text, nullable=True) - repository_owner_id = Column(Integer, nullable=True) - repository_url = Column(Text, nullable=True) - merged = Column(Boolean, nullable=True) - number_of_commits = Column(Integer, nullable=True) - number_of_comments = Column(Integer, nullable=True) - lines_of_code_added = Column(Integer, nullable=True) - lines_of_code_removed = Column(Integer, nullable=True) - number_of_files_changed = Column(Integer, nullable=True) - merged_by_id = Column(BigInteger, nullable=True) - merged_by_username = Column(Text, nullable=True) - linked_ticket = Column(Text, nullable=True) - project_name = Column(Text, nullable=True) - project_folder_label = Column(Text, nullable=True) - week_number = Column(SmallInteger, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'pr_url': self.pr_url, - 'pr_id': self.pr_id, - 'pr_node_id': self.pr_node_id, - 'html_url': self.html_url, - 'status': self.status, - 'title': self.title, - 'raised_by_username': self.raised_by_username, - 'raised_by_id': self.raised_by_id, - 'body': self.body, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'closed_at': self.closed_at, - 'merged_at': self.merged_at, - 'assignees': self.assignees, - 'requested_reviewers': self.requested_reviewers, - 'labels': self.labels, - 'review_comments_url': self.review_comments_url, - 'comments_url': self.comments_url, - 'repository_id': self.repository_id, - 'repository_owner_name': self.repository_owner_name, - 'repository_owner_id': self.repository_owner_id, - 'repository_url': self.repository_url, - 'merged': self.merged, - 'number_of_commits': self.number_of_commits, - 'number_of_comments': self.number_of_comments, - 'lines_of_code_added': self.lines_of_code_added, - 'lines_of_code_removed': self.lines_of_code_removed, - 'number_of_files_changed': self.number_of_files_changed, - 'merged_by_id': self.merged_by_id, - 'merged_by_username': self.merged_by_username, - 'linked_ticket': self.linked_ticket, - 'project_name': self.project_name, - 'project_folder_label': self.project_folder_label, - 'week_number': self.week_number - } - -class MentorshipWebsiteContributorProject(Base): - __tablename__ = 'mentorship_website_contributor_project' - - project_folder = Column(Text, primary_key=True) - contributor = Column(Text, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'project_folder': self.project_folder, - 'contributor': self.contributor - } - -class PointSystem(Base): - __tablename__ = 'point_system' - - id = Column(BigInteger, primary_key=True) - complexity = Column(Text, nullable=False) - points = Column(SmallInteger, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': 
self.id, - 'complexity': self.complexity, - 'points': self.points - } - -class PointTransactions(Base): - __tablename__ = 'point_transactions' - - id = Column(BigInteger, primary_key=True) - user_id = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=True) - issue_id = Column(BigInteger, ForeignKey('issues.id'), nullable=False) - point = Column(Integer, nullable=True) - type = Column(Text, nullable=True) - created_at = Column(DateTime, default=func.now(), nullable=False) # Set to current time when created - updated_at = Column(DateTime, default=func.now(), onupdate=func.now(), nullable=False) # Updated to current time when record is modified - angel_mentor_id = Column(BigInteger, ForeignKey('mentor_details.id'), nullable=True) - - - contributor = relationship('ContributorsRegistration', back_populates='point_transactions') - issue = relationship('Issues', back_populates='point_transactions') - mentor = relationship('MentorDetails', back_populates='point_transactions') - - def __repr__(self): - return f"" - - - def to_dict(self): - return { - 'id': self.id, - 'user_id': self.user_id, - 'issue_id': self.issue_id, - 'point': self.point, - 'type': self.type, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'angel_mentor_id': self.angel_mentor_id - } - -class PointsMapping(Base): - __tablename__ = 'points_mapping' - - id = Column(BigInteger, primary_key=True) - role = Column(String(50), nullable=False) - complexity = Column(String(50), nullable=False) - points = Column(Integer, nullable=False) - created_at = Column(DateTime, nullable=True) - updated_at = Column(DateTime, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'role': self.role, - 'complexity': self.complexity, - 'points': self.points, - 'created_at': self.created_at, - 'updated_at': self.updated_at - } - - - -### - -class PrHistory(Base): - __tablename__ = 'pr_history' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - created_at = Column(DateTime, nullable=True) - api_url = Column(Text, nullable=True) - html_url = Column(Text, unique=True, nullable=True) - raised_by = Column(BigInteger, nullable=True) - raised_at = Column(DateTime, nullable=False) - raised_by_username = Column(Text, nullable=False) - status = Column(Text, nullable=True) - is_merged = Column(Boolean, nullable=True) - merged_by = Column(BigInteger, nullable=True) - merged_at = Column(Text, nullable=True) - merged_by_username = Column(Text, nullable=True) - pr_id = Column(BigInteger, nullable=False) - ticket_url = Column(Text, nullable=False) - ticket_complexity = Column(Text, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'created_at': self.created_at, - 'api_url': self.api_url, - 'html_url': self.html_url, - 'raised_by': self.raised_by, - 'raised_at': self.raised_at, - 'raised_by_username': self.raised_by_username, - 'status': self.status, - 'is_merged': self.is_merged, - 'merged_by': self.merged_by, - 'merged_at': self.merged_at, - 'merged_by_username': self.merged_by_username, - 'pr_id': self.pr_id, - 'ticket_url': self.ticket_url, - 'ticket_complexity': self.ticket_complexity - } - -class PrStaging(Base): - __tablename__ = 'pr_staging' - - id = Column(String(36), primary_key=True) # UUID field - created_at = Column(DateTime, nullable=True) - api_url = Column(Text, nullable=True) - html_url = Column(Text, unique=True, nullable=True) - raised_by = Column(BigInteger, nullable=True) - raised_at = 
Column(DateTime, nullable=False) - raised_by_username = Column(Text, nullable=False) - status = Column(Text, nullable=True) - is_merged = Column(Boolean, nullable=True) - merged_by = Column(BigInteger, nullable=True) - merged_at = Column(Text, nullable=True) - merged_by_username = Column(Text, nullable=True) - pr_id = Column(BigInteger, nullable=False) - points = Column(SmallInteger, nullable=False) - ticket_url = Column(Text, nullable=False) - ticket_complexity = Column(Text, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'created_at': self.created_at, - 'api_url': self.api_url, - 'html_url': self.html_url, - 'raised_by': self.raised_by, - 'raised_at': self.raised_at, - 'raised_by_username': self.raised_by_username, - 'status': self.status, - 'is_merged': self.is_merged, - 'merged_by': self.merged_by, - 'merged_at': self.merged_at, - 'merged_by_username': self.merged_by_username, - 'pr_id': self.pr_id, - 'points': self.points, - 'ticket_url': self.ticket_url, - 'ticket_complexity': self.ticket_complexity - } - -class Product(Base): - __tablename__ = 'product' - - id = Column(BigInteger, primary_key=True) # Auto field - name = Column(Text, unique=True, nullable=False) - description = Column(Text, nullable=True) - wiki_url = Column(Text, nullable=True) - channel_id = Column(BigInteger, ForeignKey('discord_channels.channel_id'), nullable=True) # Assumes 'DiscordChannels' model - - channel = relationship('DiscordChannels', back_populates='products') - - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'name': self.name, - 'description': self.description, - 'wiki_url': self.wiki_url, - 'channel_id': self.channel_id - } - -class RoleMaster(Base): - __tablename__ = 'role_master' - - id = Column(BigInteger, primary_key=True) # Auto field - created_at = Column(DateTime, nullable=False) - updated_at = Column(DateTime, nullable=True) - role = Column(Text, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'role': self.role - } - -class TicketComments(Base): - __tablename__ = 'ticket_comments' - - id = Column(BigInteger, primary_key=True) - url = Column(Text, nullable=True) - html_url = Column(Text, nullable=True) - issue_url = Column(Text, nullable=True) - node_id = Column(Text, nullable=True) - comment_id = Column(BigInteger, nullable=True) - issue_id = Column(BigInteger, nullable=True) - commented_by = Column(Text, nullable=True) - commented_by_id = Column(BigInteger, nullable=True) - created_at = Column(DateTime, nullable=True) - updated_at = Column(DateTime, nullable=True) - content = Column(Text, nullable=True) - reactions_url = Column(Text, nullable=True) - ticket_url = Column(Text, nullable=False) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'url': self.url, - 'html_url': self.html_url, - 'issue_url': self.issue_url, - 'node_id': self.node_id, - 'commented_by': self.commented_by, - 'commented_by_id': self.commented_by_id, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'content': self.content, - 'reactions_url': self.reactions_url, - 'ticket_url': self.ticket_url - } - -class UnlistedTickets(Base): - __tablename__ = 'unlisted_tickets' - - created_at = Column(DateTime, nullable=True) - name = Column(Text, nullable=True) - product = Column(Text, nullable=True) - complexity = Column(Text, nullable=True) - 
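- # Classification fields, mirroring the corresponding dmp_tickets columns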
project_category = Column(Text, nullable=True) - project_sub_category = Column(Text, nullable=True) - reqd_skills = Column(Text, nullable=True) - issue_id = Column(BigInteger, unique=True, nullable=False) - api_endpoint_url = Column(Text, unique=True, nullable=True) - url = Column(Text, unique=True, nullable=True) - ticket_points = Column(SmallInteger, nullable=True) - index = Column(SmallInteger, unique=True, nullable=False) - mentors = Column(Text, nullable=True) - uuid = Column(String(36), primary_key=True) # UUID field - status = Column(Text, nullable=True) - organization = Column(Text, nullable=True) - - __table_args__ = (UniqueConstraint('uuid', 'issue_id'),) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'created_at': self.created_at, - 'name': self.name, - 'product': self.product, - 'complexity': self.complexity, - 'project_category': self.project_category, - 'project_sub_category': self.project_sub_category, - 'reqd_skills': self.reqd_skills, - 'issue_id': self.issue_id, - 'api_endpoint_url': self.api_endpoint_url, - 'url': self.url, - 'ticket_points': self.ticket_points, - 'index': self.index, - 'mentors': self.mentors, - 'uuid': self.uuid, - 'status': self.status, - 'organization': self.organization - } - -class UnstructuredDiscordData(Base): - __tablename__ = 'unstructured_discord_data' - - text = Column(Text, nullable=True) - author = Column(BigInteger, nullable=True) - channel = Column(BigInteger, nullable=True) - channel_name = Column(Text, nullable=True) - uuid = Column(String(36), primary_key=True) # UUID field - author_name = Column(Text, nullable=True) - author_roles = Column(Text, nullable=True) - sent_at = Column(Text, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'text': self.text, - 'author': self.author, - 'channel': self.channel, - 'channel_name': self.channel_name, - 'uuid': self.uuid, - 'author_name': self.author_name, - 'author_roles': self.author_roles, - 'sent_at': self.sent_at - } - -class UserActivity(Base): - __tablename__ = 'user_activity' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - contributor_id = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=False) # Assumes 'ContributorsRegistration' model - issue_id = Column(BigInteger, ForeignKey('issues.id'), nullable=False) # Assumes 'Issues' model - activity = Column(Text, nullable=True) - created_at = Column(DateTime, nullable=True) - updated_at = Column(DateTime, nullable=True) - mentor_id = Column(BigInteger, ForeignKey('mentor_details.id'), nullable=True) # Assumes 'MentorDetails' model - - contributor = relationship('ContributorsRegistration', back_populates='user_activities') - issue = relationship('Issues', back_populates='user_activities') - mentor = relationship('MentorDetails', back_populates='user_activities') - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'contributor_id': self.contributor_id, - 'issue_id': self.issue_id, - 'activity': self.activity, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'mentor_id': self.mentor_id - } - -class UserBadges(Base): - __tablename__ = 'user_badges' - id = Column(UUID(as_uuid=True), primary_key=True) - user_id = Column(BigInteger, ForeignKey('users.id'), nullable=False) # Assumes 'Users' model - badge_id = Column(BigInteger, ForeignKey('badges.id'), nullable=False) # Assumes 'Badges' model - created_at = Column(DateTime, nullable=True) - updated_at = Column(DateTime, nullable=True) - - user = 
relationship('Users', back_populates='user_badges') - badge = relationship('Badges', back_populates='user_badges') - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'user_id': self.user_id, - 'badge_id': self.badge_id, - 'created_at': self.created_at, - 'updated_at': self.updated_at - } - -class UserCertificates(Base): - __tablename__ = 'user_certificates' - id = Column(UUID(as_uuid=True), primary_key=True) - user_id = Column(BigInteger, ForeignKey('users.id'), nullable=False) # Assumes 'Users' model - certificate_link = Column(Text, nullable=True) - created_at = Column(DateTime, nullable=True) - updated_at = Column(DateTime, nullable=True) - - user = relationship('Users', back_populates='user_certificates') - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'user_id': self.user_id, - 'certificate_link': self.certificate_link, - 'created_at': self.created_at, - 'updated_at': self.updated_at - } - - - -### - -class UserPointsMapping(Base): - __tablename__ = 'user_points_mapping' - id = Column(UUID(as_uuid=True), primary_key=True) - contributor = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=True) # Assumes 'ContributorsRegistration' model - points = Column(Integer, nullable=False) - level = Column(String(50), nullable=True) - created_at = Column(DateTime, default=func.now(), nullable=False) # Set to current time when created - updated_at = Column(DateTime, default=func.now(), onupdate=func.now(), nullable=False) - mentor_id = Column(BigInteger, ForeignKey('mentor_details.id'), nullable=True) # Assumes 'MentorDetails' model - - contributors = relationship('ContributorsRegistration', back_populates='user_points_mappings') - mentor = relationship('MentorDetails', back_populates='user_points_mappings') - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'contributor_id': self.contributor, - 'points': self.points, - 'level': self.level, - 'created_at': self.created_at, - 'updated_at': self.updated_at, - 'mentor_id': self.mentor_id - } - -class Users(Base): - __tablename__ = 'users' - - id = Column(BigInteger, primary_key=True) # Assumes id is the primary key - name = Column(Text, nullable=True) - discord = Column(Text, unique=True, nullable=True) - github = Column(Text, nullable=True) - points = Column(Integer, nullable=True) - level = Column(Text, nullable=True) - created_at = Column(DateTime, nullable=True) - updated_at = Column(DateTime, nullable=True) - - user_badges = relationship('UserBadges', back_populates='user') - user_certificates = relationship('UserCertificates', back_populates='user') - - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'name': self.name, - 'discord': self.discord, - 'github': self.github, - 'points': self.points, - 'level': self.level, - 'created_at': self.created_at, - 'updated_at': self.updated_at - } - -class VcLogs(Base): - __tablename__ = 'vc_logs' - - id = Column(BigInteger, primary_key=True) # Auto field - created_at = Column(DateTime, nullable=False) - discord_id = Column(BigInteger, nullable=True) - discord_name = Column(Text, nullable=True) - option = Column(Text, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'created_at': self.created_at, - 'discord_id': self.discord_id, - 'discord_name': self.discord_name, - 'option': self.option - } - -class GitHubProfileData(Base): - __tablename__ = 'github_profile_data' - - github_username = Column(String, primary_key=True) - 
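- # Aggregate contribution counters; discord_id links this GitHub profile to a Discord user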
discord_id = Column(BigInteger, nullable=False) - classroom_points = Column(Integer, nullable=False, default=0) - prs_raised = Column(Integer, nullable=False, default=0) - prs_reviewed = Column(Integer, nullable=False, default=0) - prs_merged = Column(Integer, nullable=False, default=0) - dpg_points = Column(Integer, nullable=False, default=0) - milestone = Column(Integer, nullable=False, default=0) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'github_username': self.github_username, - 'discord_id': self.discord_id, - 'classroom_points': self.classroom_points, - 'prs_raised': self.prs_raised, - 'prs_reviewed': self.prs_reviewed, - 'prs_merged': self.prs_merged, - 'dpg_points': self.dpg_points, - 'milestone': self.milestone, - } - -class CommunityOrgs(Base): - __tablename__ = 'community_orgs' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - name = Column(Text, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'name': self.name - } - - - -class ContributorPoints(Base): - __tablename__ = 'contributor_points' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - contributors_id = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=True) - total_points = Column(Integer, nullable=False, default=0) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'contributors_id': self.contributors_id, - 'total_points': self.total_points - } - -class MentorNotAdded(Base): - __tablename__ = 'mentor_not_added' - - id = Column(BigInteger, primary_key=True, autoincrement=True) - mentor_github_id = Column(BigInteger, nullable=True) - issue_id = Column(BigInteger, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'id': self.id, - 'mentor_github_id': self.mentor_github_id, - 'issue_id': self.issue_id - } - - - -class Leaderboard(Base): - __tablename__ = 'leaderboard' - - discord_id = Column(BigInteger, primary_key=True, autoincrement=False) - github_id = Column(BigInteger, nullable=False) - github_url = Column(Text, nullable=False) - apprentice_badge = Column(Boolean, nullable=True) - converser_badge = Column(Boolean, nullable=False, default=False) - rockstar_badge = Column(Boolean, nullable=False, default=False) - enthusiast_badge = Column(Boolean, nullable=False, default=False) - rising_star_badge = Column(Boolean, nullable=False, default=False) - github_x_discord_badge = Column(Boolean, nullable=False, default=False) - points = Column(Integer, nullable=False, default=0) - bronze_badge = Column(Boolean, nullable=False, default=False) - silver_badge = Column(Boolean, nullable=False, default=False) - gold_badge = Column(Boolean, nullable=False, default=False) - ruby_badge = Column(Boolean, nullable=False, default=False) - diamond_badge = Column(Boolean, nullable=False, default=False) - certificate_link = Column(Text, nullable=True) - - def __repr__(self): - return f"" - - def to_dict(self): - return { - 'discord_id': self.discord_id, - 'github_id': self.github_id, - 'github_url': self.github_url, - 'apprentice_badge': self.apprentice_badge, - 'converser_badge': self.converser_badge, - 'rockstar_badge': self.rockstar_badge, - 'enthusiast_badge': self.enthusiast_badge, - 'rising_star_badge': self.rising_star_badge, - 'github_x_discord_badge': self.github_x_discord_badge, - 'points': self.points, - 'bronze_badge': self.bronze_badge, - 'silver_badge': self.silver_badge, - 'gold_badge': self.gold_badge, - 
'ruby_badge': self.ruby_badge, - 'diamond_badge': self.diamond_badge, - 'certificate_link': self.certificate_link - } \ No newline at end of file diff --git a/shared_migrations/db/server.py b/shared_migrations/db/server.py deleted file mode 100644 index e621c65..0000000 --- a/shared_migrations/db/server.py +++ /dev/null @@ -1,909 +0,0 @@ -import dotenv -import os -## -from sqlalchemy.future import select -from sqlalchemy.orm import sessionmaker, aliased -from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession -from sqlalchemy.pool import NullPool -from sqlalchemy.ext.declarative import DeclarativeMeta -from .models import Base, ContributorsRegistration,GithubClassroomData, IssueContributors -from sqlalchemy import delete, insert -from sqlalchemy import select, asc, desc,update, join -from sqlalchemy.exc import IntegrityError -from sqlalchemy.sql import exists -from datetime import datetime -from sqlalchemy import cast, String ,and_ -from sqlalchemy.dialects.postgresql import ARRAY -from .models import Issues, CommunityOrgs, PointSystem, PrHistory - - -class ServerQueries: - - def convert_dict(self,data): - try: - if type(data) == list: - data = [val.to_dict() for val in data] - else: - return [data.to_dict()] - - return data - except Exception as e: - print(e) - raise Exception - - - def get_class_by_tablename(self,tablename): - try: - for cls in Base.registry._class_registry.values(): - if isinstance(cls, DeclarativeMeta): - if hasattr(cls, '__tablename__') and cls.__tablename__ == tablename: - return cls - return None - except Exception as e: - print(f"ERROR get_class_by_tablename - {e}") - return None - - async def readAll(self,table_class): - try: - table = self.get_class_by_tablename(table_class) - # Query all records from the specified table class - async with self.session() as session: - stmt = select(table) - result = await session.execute(stmt) - - data = result.scalars().all() - result = self.convert_dict(data) - return result - except Exception as e: - print(f"An error occurred -read_all_from_table : {e}") - return None - - - async def deleteComment(self,issue_id,table_name): - try: - table = self.get_class_by_tablename(table_name) - async with self.session() as session: - stmt = delete(table).where(table.issue_id == issue_id) - await session.execute(stmt) - await session.commit() - - return True - - except Exception as e: - print(f"An error occurred - deleteComment: {e}") - return False - - async def read(self, table, filters=None, select_columns=None, order=None, limit=None, offset=None): - """ - Reads data from a table in the database using SQLAlchemy ORM. 
- """ - try: - table_class = self.get_class_by_tablename(table) - - # Select specific columns or all columns if None - if select_columns: - stmt = select([getattr(table_class, col) for col in select_columns]) - else: - stmt = select(table_class) - - # Apply filters - if filters: - for column, condition in filters.items(): - if isinstance(condition, tuple) and len(condition) == 2: - operation, value = condition - col_attr = getattr(table_class, column) - if operation == 'gt': - stmt = stmt.where(col_attr > value) - elif operation == 'lt': - stmt = stmt.where(col_attr < value) - elif operation == 'gte': - stmt = stmt.where(col_attr >= value) - elif operation == 'lte': - stmt = stmt.where(col_attr <= value) - else: - stmt = stmt.where(getattr(table_class, column) == condition) - - # Apply ordering - if order: - for column, direction in order.items(): - if direction == 'asc': - stmt = stmt.order_by(asc(getattr(table_class, column))) - elif direction == 'desc': - stmt = stmt.order_by(desc(getattr(table_class, column))) - - # Apply limit - if limit: - stmt = stmt.limit(limit) - - # Apply offset - if offset: - stmt = stmt.offset(offset) - - async with self.session() as session: - result = await session.execute(stmt) - data = result.scalars().all() - - # Convert result to dictionary - return [row.to_dict() for row in data] - - except Exception as e: - print(f"An error occurred - read: {e}") - return None - - - async def add_discord_metrics(self, discord_metrics): - try: - async with self.session() as session: - DiscordMetrics = self.get_class_by_tablename("discord_metrics") - - for metric in discord_metrics: - stmt = select(DiscordMetrics).where(DiscordMetrics.product_name == metric["product_name"]) - result = await session.execute(stmt) - existing_record = result.scalars().first() - - if existing_record: - update_stmt = ( - update(DiscordMetrics) - .where(DiscordMetrics.product_name == metric["product_name"]) - .values( - mentor_messages=metric["mentor_messages"], - contributor_messages=metric["contributor_messages"] - ) - .returning(DiscordMetrics) - ) - updated_data = await session.execute(update_stmt) - data = updated_data.scalars().first() - else: - new_record = DiscordMetrics(**metric) - session.add(new_record) - await session.commit() - await session.refresh(new_record) - data = new_record - - await session.commit() - return data - - except IntegrityError as e: - print(f"An error occurred: {e}") - await session.rollback() - return None - - async def add_github_metrics(self, github_metrics): - try: - async with self.session() as session: - for metric in github_metrics: - GithubMetrics = self.get_class_by_tablename("github_metrics") - - # Check if the metric already exists in the database - stmt = select(GithubMetrics).where(GithubMetrics.product_name == metric["product_name"]) - result = await session.execute(stmt) - existing_record = result.scalars().first() - - if existing_record: - update_data = {key: value for key, value in metric.items() if key != "product_name"} - - update_stmt = ( - update(GithubMetrics) - .where(GithubMetrics.product_name == metric["product_name"]) - .values(update_data) - .returning(GithubMetrics) - ) - updated_data = await session.execute(update_stmt) - data = updated_data.scalars().first() - else: - # Insert the new metric if it doesn't exist - new_record = GithubMetrics(**metric) - session.add(new_record) - await session.commit() - await session.refresh(new_record) - data = new_record - - await session.commit() - return data - - except IntegrityError as e: - 
print(f"An error occurred: {e}") - await session.rollback() - return None - - async def check_exists(self,discord_id, assignment_id): - try: - # Construct the query for check exists - async with self.session() as session: - stmt = ( - select(exists() - .where((GithubClassroomData.discord_id.is_(None)) | (GithubClassroomData.discord_id == discord_id)) - .where(GithubClassroomData.assignment_id == assignment_id) - ) - ) - result = await session.execute(stmt) - exists_result = result.scalar() - - return exists_result - - except Exception as e: - print(f"An error occurred: {e}") - return None - - async def save_classroom_records(self, data): - try: - async with self.session() as session: - for record in data: - try: - new_record = GithubClassroomData( - **record) - session.add(new_record) - - await session.commit() - print("Record inserting successfully!") - except Exception as e: - await session.rollback() - print("Error updating record:", e) - - return True - except Exception as e: - print(f"An error occurred save_classroom_records: {e}") - return False - - async def update_classroom_records(self, data): - async with self.session() as session: - for record in data: - try: - stmt = ( - update(GithubClassroomData). - where( - GithubClassroomData.assignment_id == record.get('assignment_id'), - GithubClassroomData.discord_id == cast(str(record.get('discord_id')),String) - ). - values( - assignment_name=record.get('assignment', {}).get('title'), - assignment_url=record.get('assignment', {}).get('classroom', {}).get('url'), - c4gt_points=record.get('c4gt_points'), - github_username=record.get('students', [{}])[0].get('login'), - points_available=record.get('points_available'), - points_awarded=record.get('points_awarded',0), - roster_identifier=record.get('roster_identifier',""), - starter_code_url=record.get('starter_code_url', record.get('repository', {}).get('html_url')), - student_repository_name=record.get('repository', {}).get('full_name'), - student_repository_url=record.get('repository', {}).get('html_url'), - submission_timestamp=record.get('submission_timestamsp', datetime.now()), - updated_at=record.get('updated_at') - ) - ) - result = await session.execute(stmt) - await session.commit() - print("Record updated successfully!") - return True - except Exception as e: - await session.rollback() - print("Error updating record:", e) - return False - - async def getdiscord_from_cr(self,github_url): - try: - Table = self.get_class_by_tablename("contributors_registration") - async with self.session() as session: - stmt = (select(Table.discord_id).where(Table.github_url == github_url)) - result = await session.execute(stmt) - exists_result = result.scalar() - - return exists_result - except Exception as e: - print("Error - getdiscord_from_cr:", e) - return None - - - async def add_data(self, data: dict, table_name: str): - try: - table_class = self.get_class_by_tablename(table_name) - if not table_class: - raise ValueError(f"Table class for {table_name} not found") - - async with self.session() as session: - new_record = table_class(**data) - session.add(new_record) - await session.commit() - await session.refresh(new_record) - - return new_record - except Exception as e: - print("Error - add_data:", e) - return None - - async def insert_org(self, name): - try: - async with self.session() as session: - table = self.get_class_by_tablename("community_orgs") - if not table: - raise ValueError(f"No ORM class found for table community_orgs") - - stmt = insert(table).values( - name=name - 
).returning(table) - - result = await session.execute(stmt) - - await session.commit() - inserted_record = result.fetchone() - print("inserted_record ", {"id": inserted_record[0], "name": inserted_record[1]}) - return {"id": inserted_record[0], "name": inserted_record[1]} - - except Exception as e: - print(f"Error in record_created_ticket method: {e}") - return None - - - - async def check_record_exists(self, table_name, filter_column, filter_value): - try: - table_class = self.get_class_by_tablename(table_name) - if not table_class: - raise ValueError(f"No ORM class found for table '{table_name}'") - - async with self.session() as session: - stmt = ( - select(table_class) - .where(getattr(table_class, filter_column) == filter_value) - ) - result = await session.execute(stmt) - exists = result.scalars().first() is not None - return True if exists else False - except Exception as e: - print(f"An error occurred - check_record_exists: {e}") - return False - - - async def delete(self,table_name, filter_column, filter_value): - try: - table = self.get_class_by_tablename(table_name) - async with self.session() as session: - stmt = delete(table).where(getattr(table, filter_column) == filter_value) - await session.execute(stmt) - await session.commit() - return True - - except Exception as e: - print(f"An error occurred - delete: {e}") - return False - - - async def get_data(self,col_name,table_name,value,condition=None): - try: - Table = self.get_class_by_tablename(table_name) - async with self.session() as session: - stmt = (select(Table).where(getattr(Table, col_name) == value)) - # Execute the query - result = await session.execute(stmt) - exists_result = result.scalar() - if exists_result: - return self.convert_dict(exists_result) - else: - return None - - except Exception as e: - print(f"An error occurred - get_data: {e}") - return None - - async def checkIsTicket(self, issue_id): - try: - tables_to_check = ['issues'] - - async with self.session() as session: - data = [] - for table_name in tables_to_check: - table_class = self.get_class_by_tablename(table_name) - if not table_class: - continue - stmt = select(table_class).where(getattr(table_class, 'issue_id') == issue_id) - result = await session.execute(stmt) - records = result.scalars().all() - - if records: - data.extend(records) - # Check if data was found in any of the tables - if len(data) > 0: - return True - else: - return False - except Exception as e: - print(f"An error occurred - check_is_ticket: {e}") - return False - - - async def record_created_ticket(self, data,table_name): - try: - async with self.session() as session: - # Dynamically get the ORM class for the table - table = self.get_class_by_tablename(table_name) - - stmt = insert(table).values( - link=data['link'], - labels=cast(data['labels'], ARRAY(String)), # Cast to ARRAY type - complexity=data['complexity'], - technology=data['technology'], - status=data['status'], - created_at=data['created_at'], - updated_at=data['updated_at'], - title=data['title'], - domain=data['domain'], - description=f"{data['description']}", - org_id=data['org_id'], - issue_id=data['issue_id'], - project_type=data['project_type'] - ).returning(table) - - result = await session.execute(stmt) - - await session.commit() - - return result - - except Exception as e: - print(f"Error in record_created_ticket method: {e}") - return None - - - async def record_updated_ticket(self, data, table_name): - try: - async with self.session() as session: - # Dynamically get the ORM class for the table - table 
= self.get_class_by_tablename(table_name) - - # Build the update query - stmt = ( - update(table) - .where(table.issue_id == data['issue_id']) # Match the existing issue by issue_id - .values( - link=data['link'], - labels=cast(data['labels'], ARRAY(String)), # Cast to ARRAY type - complexity=data['complexity'], - technology=data['technology'], - status=data['status'], - created_at=data['created_at'], - updated_at=data['updated_at'], - title=data['title'], - description=f"{data['description']}", - org_id=data['org_id'] - ) - .returning(table) # Return the updated row(s) - ) - - # Execute the update statement - result = await session.execute(stmt) - - # Commit the transaction - await session.commit() - - return result - except Exception as e: - print(f"Error in record_updated_ticket method: {e}") - return None - - - async def update_data(self, data, col_name, table_name): - try: - table_class = self.get_class_by_tablename(table_name) - - async with self.session() as session: - stmt = ( - update(table_class) - .where(getattr(table_class, col_name) == data[col_name]) - .values(**data) - .returning(table_class) - ) - - result = await session.execute(stmt) - await session.commit() - - updated_record = result.scalars().first() - # Convert the updated record to a dictionary before returning - return self.convert_dict(updated_record) if updated_record else None - - except Exception as e: - print(f"Error in update_data: {e}") - return None - - - async def update_pr_data(self, data, table_name): - try: - table_class = self.get_class_by_tablename(table_name) - - async with self.session() as session: - stmt = ( - update(table_class) - .where(table_class.pr_id == data['pr_id']) # Match the existing PR by pr_id - .values( - created_at=data['created_at'], - api_url=data['api_url'], - html_url=data['html_url'], - raised_by=data['raised_by'], - raised_at=data['raised_at'], - raised_by_username=data['raised_by_username'], - status=data['status'], - is_merged=data['is_merged'], - merged_by=data['merged_by'], - merged_at=data['merged_at'], - merged_by_username=data['merged_by_username'] - ) - .returning(table_class) # Return the updated row(s) - ) - - # Execute the update statement - result = await session.execute(stmt) - - # Commit the transaction - await session.commit() - - # Optionally fetch the updated record(s) - updated_record = result.fetchone() - - return updated_record if updated_record else None - - except Exception as e: - print(f"Error in update_pr_data: {e}") - return None - - - async def update_pr_history(self, pr_id, data): - try: - async with self.session() as session: - # Query for the existing record based on pr_id (or some unique identifier) - stmt = select(PrHistory).where(PrHistory.pr_id == pr_id) - result = await session.execute(stmt) - pr_history_record = result.scalars().first() - - if pr_history_record: - # Update the fields with new values from data - pr_history_record.created_at = data['created_at'] - pr_history_record.api_url = data['api_url'] - pr_history_record.html_url = data['html_url'] - pr_history_record.raised_by = data['raised_by'] - pr_history_record.raised_at = data['raised_at'] - pr_history_record.raised_by_username = data['raised_by_username'] - pr_history_record.status = data['status'] - pr_history_record.is_merged = data['is_merged'] - pr_history_record.merged_by = data['merged_by'] - pr_history_record.merged_at = data['merged_at'] -
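- # merged_* fields remain None for PRs that were closed without being merged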
pr_history_record.merged_by_username = data['merged_by_username'] - pr_history_record.ticket_url = data['ticket_url'] - pr_history_record.ticket_complexity = data['ticket_complexity'] - - # Commit the changes to the database - await session.commit() - - # Optionally refresh the object - await session.refresh(pr_history_record) - - return pr_history_record - else: - print(f"Record with pr_id {pr_id} not found") - return None - - except Exception as e: - print(f"Error in update_pr_history: {e}") - return None - - - async def addPr(self, prData, issue_id): - try: - if issue_id: - ticket = await self.get_data("issue_id","issues",issue_id,None) - if not ticket: - ticket = await self.get_data("issue_id","dmp_tickets",issue_id,None) - - for pr in prData: - data = { - "html_url":pr["html_url"], - "pr_id":pr["pr_id"], - "raised_by":pr["raised_by"], - "raised_at":pr["raised_at"], - "raised_by_username":pr["raised_by_username"], - "status":pr["status"], - "is_merged":pr["is_merged"] if pr.get("is_merged") else None, - "merged_by":pr["merged_by"] if pr.get("merged_by") else None, - "merged_by_username":pr["merged_by_username"] if pr.get("merged_by_username") else None, - "merged_at":pr["merged_at"] if pr.get("merged_at") else None, - "points": ticket[0]["ticket_points"] if ticket else 0, - "ticket_url":ticket[0]["api_endpoint_url"] if ticket else None - } - await self.add_data(data,"connected_prs") - - return True - except Exception as e: - print(f"Error in addPr: {e}") - return None - - - async def get_issue_from_issue_id(self,issue_id): - try: - async with self.session() as session: - # Dynamically get the ORM class for the table - table = self.get_class_by_tablename("issues") - - # Build and execute the query to check if the issue_id already exists - stmt = select(table).where(table.issue_id == issue_id) - result = await session.execute(stmt) - issues = result.scalars().first() - - if issues: - return self.convert_dict(issues) - return None - - except Exception as e: - print(f"Error in get_issue_from_issue_id method: {e}") - return None - - async def get_contributors_from_issue_id(self,issue_id): - try: - async with self.session() as session: - # Dynamically get the ORM class for the table - table = self.get_class_by_tablename("issue_contributors") - - # Build and execute the query to check if the issue_id already exists - stmt = select(table).where(table.issue_id == issue_id) - result = await session.execute(stmt) - issues = result.scalars().all() - - if issues: - return self.convert_dict(issues) - return None - - except Exception as e: - print(f"Error in get_contributors_from_issue_id method: {e}") - return None - - async def get_pointsby_complexity(self, complexity_type,type="Contributor"): - try: - async with self.session() as session: - # Dynamically get the ORM class for the table - table = self.get_class_by_tablename("points_mapping") - - # Build and execute the query with multiple conditions - stmt = select(table).where( - and_( - table.complexity == complexity_type, - table.role == type - ) - ) - result = await session.execute(stmt) - points = result.scalars().all() - return points[0].points if points else 0 - - except Exception as e: - print(f"Error in get_pointsby_complexity method: {e}") - return None - - async def upsert_point_transaction(self, issue_id, user_id, points,user_type="Contributor"): - try: - async with self.session() as session: - table = self.get_class_by_tablename("point_transactions") - column_map = { - "Contributor": table.user_id, - "Mentor": table.mentor_id,
- } - chosen_column = column_map.get(user_type) - stmt = select(table).where( - and_( - table.issue_id == issue_id, - chosen_column == user_id - ) - ) - - result = await session.execute(stmt) - transaction = result.scalars().one_or_none() - - if transaction: - # Record exists, so update the points column - update_stmt = ( - update(table) - .where(and_(table.issue_id == issue_id, chosen_column == user_id)) - .values(point=points) - ) - await session.execute(update_stmt) - await session.commit() - return True - - else: - # Record does not exist, so create a new one - new_transaction = table(issue_id=issue_id,point=points) - setattr(new_transaction, chosen_column.key, user_id) - session.add(new_transaction) - await session.commit() - return True - - except Exception as e: - print(f"Error in upsert_point_transaction method: {e}") - return None - - async def save_user_points(self, user_id, points,user_type="Contributor"): - try: - async with self.session() as session: - table = self.get_class_by_tablename("user_points_mapping") - column_map = { - "Contributor": table.contributor, - "Mentor": table.mentor_id, - } - chosen_column = column_map.get(user_type) - stmt = select(table).where(chosen_column == user_id) - - result = await session.execute(stmt) - transaction = result.scalars().one_or_none() - - - if transaction: - addon_points = points + transaction.points - update_stmt = ( - update(table) - .where(chosen_column == user_id) - .values(points=addon_points) - ) - await session.execute(update_stmt) - await session.commit() - return True - - else: - # Record does not exist, so create a new one - new_transaction = table(points=points) - setattr(new_transaction, chosen_column.key, user_id) - session.add(new_transaction) - await session.commit() - return True - - except Exception as e: - print(f"Error in save_user_points method: {e}") - return None - - - async def deleteIssueComment(self, commentId): - try: - async with self.session() as session: - # Dynamically get the ORM class for the table - table = self.get_class_by_tablename("ticket_comments") - - # Build and execute the query with multiple conditions - stmt = delete(table).where( - getattr(table, "id") == commentId - ) - result = await session.execute(stmt) - await session.commit() - # A DELETE returns no rows; report how many were removed instead - return result.rowcount - except Exception as e: - print(f"Error in deleting issue comments: {e}") - return None - - - async def getUserLeaderBoardData(self): - try: - async with self.session() as session: - orgs_alias = aliased(CommunityOrgs) - points_alias = aliased(PointSystem) - - # Join the Issues table with the CommunityOrgs and PointSystem - stmt = ( - select(Issues, orgs_alias, points_alias) - .join(orgs_alias, Issues.org_id == orgs_alias.id, isouter=True) # Left join with CommunityOrgs - .join(points_alias, Issues.complexity == points_alias.complexity, isouter=True) # Left join with PointSystem - ) - - # Execute the statement - result = await session.execute(stmt) - - # Fetch all the results - records = result.all() - - # Convert to dictionary format for readability (if needed) - return [ - { - 'issue': issue.to_dict(), - 'community_org': org.to_dict() if org else None, - 'point_system': points.to_dict() if points else None - } - for issue, org, points in records - ] - except Exception as e: - print('Exception occurred while getting user leaderboard data ', e) - return None - - - async def get_joined_data_with_filters(self, filters=None): - async with self.session() as session: - # Aliases for the tables - issues = aliased(Issues) - orgs =
-            orgs = aliased(CommunityOrgs)
-            points = aliased(PointSystem)
-
-            # Base query with the join
-            query = select(
-                issues,
-                orgs,
-                points
-            ).join(
-                orgs, issues.org_id == orgs.id
-            ).join(
-                points, points.complexity == issues.complexity
-            )
-
-            # If dynamic filters are provided, apply them
-            if filters:
-                filter_conditions = []
-                for field, value in filters.items():
-                    filter_conditions.append(getattr(issues, field) == value)
-
-                query = query.where(and_(*filter_conditions))
-
-            # Execute the query and return the results
-            result = await session.execute(query)
-            records = result.all()
-
-            # Convert results to dictionaries if necessary
-            return [dict(issue=record[0].to_dict(), org=record[1].to_dict(), points=record[2].to_dict()) for record in records]
-
-    async def fetch_filtered_issues(self, filters):
-        try:
-            async with self.session() as session:
-                # Start building the query by joining tables
-                query = (
-                    select(Issues, CommunityOrgs, PointSystem, IssueContributors, ContributorsRegistration)
-                    .join(CommunityOrgs, Issues.org_id == CommunityOrgs.id)
-                    .join(PointSystem, Issues.complexity == PointSystem.complexity)
-                    .outerjoin(IssueContributors, Issues.id == IssueContributors.issue_id)
-                    .outerjoin(ContributorsRegistration, IssueContributors.contributor_id == ContributorsRegistration.id)
-                    .where(Issues.complexity != 'Beginner')
-                    .order_by(desc(Issues.id))
-                )
-
-                # Prepare dynamic filter conditions
-                conditions = []
-
-                # Check if there are filters for the Issues table
-                if 'issues' in filters:
-                    for field, value in filters['issues'].items():
-                        conditions.append(getattr(Issues, field) == value)
-
-                # Check if there are filters for the CommunityOrgs table
-                if 'org' in filters:
-                    for field, value in filters['org'].items():
-                        conditions.append(getattr(CommunityOrgs, field) == value)
-
-                # Check if there are filters for the PointSystem table
-                if 'points' in filters:
-                    for field, value in filters['points'].items():
-                        conditions.append(getattr(PointSystem, field) == value)
-
-                # Apply filters (if any) to the query
-                if conditions:
-                    query = query.where(and_(*conditions))
-
-                # Execute the query and fetch results
-                result = await session.execute(query)
-                rows = result.fetchall()
-
-                # Process the result into a dictionary or a preferred format
-                data = []
-                for row in rows:
-                    issue = row.Issues.to_dict()
-                    org = row.CommunityOrgs.to_dict() if row.CommunityOrgs else None
-                    point_system = row.PointSystem.to_dict()
-                    contributors_registration = row.ContributorsRegistration.to_dict() if row.ContributorsRegistration else None
-                    data.append({
-                        'issue': issue,
-                        'org': org,
-                        'points': point_system,
-                        'contributors_registration': contributors_registration
-                    })
-
-                return data
-
-        except Exception as e:
-            print(f"Error in fetch_filtered_issues: {e}")
-            return None
-
-
-    def add_github_user(self, user):
-        data = self.client.table("contributors_registration").upsert(user, on_conflict=["github_id", "discord_id"]).execute()
-        return data.data
-
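Taken together, the helpers deleted above formed a role-aware points pipeline: look up the points for an issue's complexity, upsert a per-issue transaction, and fold the delta into the user's running total. A minimal sketch of how they composed, assuming `db` is an instance of the async interface above and `issue` is a dict with 'id' and 'complexity' keys (`award_points` itself is hypothetical, not part of this codebase):

    # Hypothetical composition of the deleted helpers; signatures match the code above.
    async def award_points(db, issue, user_id, role="Contributor"):
        points = await db.get_pointsby_complexity(issue["complexity"], type=role)        # points_mapping lookup
        await db.upsert_point_transaction(issue["id"], user_id, points, user_type=role)  # per-issue ledger row
        await db.save_user_points(user_id, points, user_type=role)                       # running total per user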
diff --git a/shared_migrations/migrations/README b/shared_migrations/migrations/README
deleted file mode 100644
index 98e4f9c..0000000
--- a/shared_migrations/migrations/README
+++ /dev/null
@@ -1 +0,0 @@
-Generic single-database configuration.
\ No newline at end of file
diff --git a/shared_migrations/migrations/env.py b/shared_migrations/migrations/env.py
deleted file mode 100644
index 43002a6..0000000
--- a/shared_migrations/migrations/env.py
+++ /dev/null
@@ -1,83 +0,0 @@
-from logging.config import fileConfig
-
-from sqlalchemy import engine_from_config
-from sqlalchemy import pool
-from db.models import shared_metadata, Base
-
-from alembic import context
-
-# this is the Alembic Config object, which provides
-# access to the values within the .ini file in use.
-config = context.config
-
-from dotenv import load_dotenv
-import os
-
-load_dotenv()
-url = os.getenv("DATABASE_URL")
-config.set_main_option("sqlalchemy.url", url)
-
-# Interpret the config file for Python logging.
-# This line sets up loggers basically.
-if config.config_file_name is not None:
-    fileConfig(config.config_file_name)
-
-# add your model's MetaData object here
-# for 'autogenerate' support
-target_metadata = Base.metadata
-
-# other values from the config, defined by the needs of env.py,
-# can be acquired:
-# my_important_option = config.get_main_option("my_important_option")
-# ... etc.
-
-
-def run_migrations_offline() -> None:
-    """Run migrations in 'offline' mode.
-
-    This configures the context with just a URL
-    and not an Engine, though an Engine is acceptable
-    here as well. By skipping the Engine creation
-    we don't even need a DBAPI to be available.
-
-    Calls to context.execute() here emit the given string to the
-    script output.
-
-    """
-    url = config.get_main_option("sqlalchemy.url")
-    context.configure(
-        url=url,
-        target_metadata=target_metadata,
-        literal_binds=True,
-        dialect_opts={"paramstyle": "named"},
-    )
-
-    with context.begin_transaction():
-        context.run_migrations()
-
-
-def run_migrations_online() -> None:
-    """Run migrations in 'online' mode.
-
-    In this scenario we need to create an Engine
-    and associate a connection with the context.
-
-    """
-    engine = engine_from_config(
-        config.get_section(config.config_ini_section), prefix='sqlalchemy.')
-
-    with engine.connect() as connection:
-        context.configure(
-            connection=connection,
-            target_metadata=target_metadata,
-            compare_type=True
-        )
-
-        with context.begin_transaction():
-            context.run_migrations()
-
-
-if context.is_offline_mode():
-    run_migrations_offline()
-else:
-    run_migrations_online()
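The env.py removed here was the standard Alembic entry point: offline mode renders SQL from just the configured URL, while online mode builds an Engine and runs migrations over a live connection, with DATABASE_URL injected from .env at import time. For reference, a minimal sketch of driving the same env.py programmatically via Alembic's command API (the alembic.ini path and its script_location are assumptions, not shown in this patch):

    # Illustrative only; assumes an alembic.ini whose script_location points at shared_migrations/migrations.
    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")
    command.upgrade(cfg, "head")            # online mode: executes against DATABASE_URL
    command.upgrade(cfg, "head", sql=True)  # offline mode: prints the SQL instead of executing it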
diff --git a/shared_migrations/migrations/script.py.mako b/shared_migrations/migrations/script.py.mako
deleted file mode 100644
index fbc4b07..0000000
--- a/shared_migrations/migrations/script.py.mako
+++ /dev/null
@@ -1,26 +0,0 @@
-"""${message}
-
-Revision ID: ${up_revision}
-Revises: ${down_revision | comma,n}
-Create Date: ${create_date}
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-${imports if imports else ""}
-
-# revision identifiers, used by Alembic.
-revision: str = ${repr(up_revision)}
-down_revision: Union[str, None] = ${repr(down_revision)}
-branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
-depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
-
-
-def upgrade() -> None:
-    ${upgrades if upgrades else "pass"}
-
-
-def downgrade() -> None:
-    ${downgrades if downgrades else "pass"}
diff --git a/shared_migrations/migrations/versions/8d1e6a7e959a_initial_migration.py b/shared_migrations/migrations/versions/8d1e6a7e959a_initial_migration.py
deleted file mode 100644
index db77404..0000000
--- a/shared_migrations/migrations/versions/8d1e6a7e959a_initial_migration.py
+++ /dev/null
@@ -1,1723 +0,0 @@
-"""Initial migration
-
-Revision ID: 8d1e6a7e959a
-Revises:
-Create Date: 2024-12-18 18:12:00.911503
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-# revision identifiers, used by Alembic.
-revision: str = '8d1e6a7e959a'
-down_revision: Union[str, None] = None
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('github_profile_data',
-    sa.Column('github_username', sa.String(), nullable=False),
-    sa.Column('discord_id', sa.BigInteger(), nullable=False),
-    sa.Column('classroom_points', sa.Integer(), nullable=False),
-    sa.Column('prs_raised', sa.Integer(), nullable=False),
-    sa.Column('prs_reviewed', sa.Integer(), nullable=False),
-    sa.Column('prs_merged', sa.Integer(), nullable=False),
-    sa.Column('dpg_points', sa.Integer(), nullable=False),
-    sa.Column('milestone', sa.Integer(), nullable=False),
-    sa.PrimaryKeyConstraint('github_username')
-    )
-    op.create_table('leaderboard',
-    sa.Column('discord_id', sa.BigInteger(), autoincrement=False, nullable=False),
-    sa.Column('github_id', sa.BigInteger(), nullable=False),
-    sa.Column('github_url', sa.Text(), nullable=False),
-    sa.Column('apprentice_badge', sa.Boolean(), nullable=True),
-    sa.Column('converser_badge', sa.Boolean(), nullable=False),
-    sa.Column('rockstar_badge', sa.Boolean(), nullable=False),
-    sa.Column('enthusiast_badge', sa.Boolean(), nullable=False),
-    sa.Column('rising_star_badge', sa.Boolean(), nullable=False),
-    sa.Column('github_x_discord_badge', sa.Boolean(), nullable=False),
-    sa.Column('points', sa.Integer(), nullable=False),
-    sa.Column('bronze_badge', sa.Boolean(), nullable=False),
-    sa.Column('silver_badge', sa.Boolean(), nullable=False),
-    sa.Column('gold_badge', sa.Boolean(), nullable=False),
-    sa.Column('ruby_badge', sa.Boolean(), nullable=False),
-    sa.Column('diamond_badge', sa.Boolean(), nullable=False),
-    sa.Column('certificate_link', sa.Text(), nullable=True),
-    sa.PrimaryKeyConstraint('discord_id')
-    )
-    op.create_table('role_master',
-    sa.Column('id', sa.BigInteger(), nullable=False),
-    sa.Column('created_at', db.models.DateTime(), nullable=False),
-    sa.Column('updated_at', db.models.DateTime(), nullable=True),
-    sa.Column('role', sa.Text(), nullable=True),
-    sa.PrimaryKeyConstraint('id')
-    )
-    op.create_table('unstructured_discord_data',
-    sa.Column('text', sa.Text(), nullable=True),
-    sa.Column('author', sa.BigInteger(), nullable=True),
-    sa.Column('channel', sa.BigInteger(), nullable=True),
-    sa.Column('channel_name', sa.Text(), nullable=True),
-    sa.Column('uuid', sa.String(length=36), nullable=False),
-    sa.Column('author_name', sa.Text(), nullable=True),
-
sa.Column('author_roles', sa.Text(), nullable=True), - sa.Column('sent_at', sa.Text(), nullable=True), - sa.PrimaryKeyConstraint('uuid') - ) - op.create_table('user_points_mapping', - sa.Column('id', sa.UUID(), nullable=False), - sa.Column('contributor', sa.BigInteger(), nullable=True), - sa.Column('points', sa.Integer(), nullable=False), - sa.Column('level', sa.String(length=50), nullable=True), - sa.Column('created_at', db.models.DateTime(), nullable=False), - sa.Column('updated_at', db.models.DateTime(), nullable=False), - sa.Column('mentor_id', sa.BigInteger(), nullable=True), - sa.ForeignKeyConstraint(['contributor'], ['contributors_registration.id'], ), - sa.ForeignKeyConstraint(['mentor_id'], ['mentor_details.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.drop_table('__contributors_vc') - op.drop_table('__mentors') - op.drop_table('__mentorship_program_ticket_comments') - op.drop_table('__mentorship_program_pull_request') - op.drop_table('__mentorship_program_tickets') - op.drop_table('__community_program_unique_user_data') - op.drop_table('__contributors_discord') - op.drop_table('__applicant') - op.drop_table('__dashboard_config') - op.drop_table('__mentorship_program_projects') - op.drop_table('__comments') - op.drop_table('__dev_onboarding') - op.drop_table('contributors_registration_old') - op.drop_table('__pull_requests') - op.drop_table('__community_program_tickets') - op.drop_table('__community_organisations') - op.drop_table('__mentorship_program_selected_contributors') - op.drop_table('__community_program_product_wise_tickets') - op.drop_table('unstructured discord data') - op.alter_column('app_comments', 'id', - existing_type=sa.UUID(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text('gen_random_uuid()')) - op.alter_column('app_comments', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('app_comments', 'issue_id', - existing_type=sa.BIGINT(), - nullable=True) - op.alter_column('badges', 'id', - existing_type=sa.INTEGER(), - type_=sa.UUID(), - existing_nullable=False) - op.alter_column('badges', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('badges', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('ccbp_tickets', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('ccbp_tickets', 'issue_id', - existing_type=sa.BIGINT(), - nullable=True) - op.alter_column('ccbp_tickets', 'index', - existing_type=sa.SMALLINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('ccbp_tickets', 'closed_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_comment='date-time at which issue was closed', - existing_nullable=True) - op.alter_column('chapters', 'org_name', - existing_type=sa.TEXT(), - nullable=True) - op.alter_column('chapters', 'discord_role_id', - existing_type=sa.BIGINT(), - nullable=True, - comment='db id of the corresponding member role in discord server', - 
existing_comment='db od of the corresponding member role in discord server') - op.alter_column('chapters', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('community_orgs', 'name', - existing_type=sa.TEXT(), - nullable=True) - op.alter_column('connected_prs', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('connected_prs', 'raised_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False) - op.alter_column('connected_prs', 'merged_at', - existing_type=postgresql.TIMESTAMP(), - type_=sa.Text(), - existing_nullable=True) - op.alter_column('contributor_names', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.add_column('contributor_points', sa.Column('contributors_id', sa.BigInteger(), nullable=True)) - op.drop_constraint('contributor_points_contributors_id_fkey', 'contributor_points', type_='foreignkey') - op.create_foreign_key(None, 'contributor_points', 'contributors_registration', ['contributors_id'], ['id']) - op.drop_column('contributor_points', 'user_id') - op.alter_column('contributors_discord', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('contributors_discord', 'joined_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.drop_column('contributors_discord', 'city') - op.drop_column('contributors_discord', 'country') - op.drop_column('contributors_discord', 'experience') - op.alter_column('contributors_registration', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('contributors_registration', 'joined_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.drop_table_comment( - 'contributors_registration', - existing_comment='This is a duplicate of contributors_registration_old', - schema=None - ) - op.add_column('discord_engagement', sa.Column('converserbadge', sa.Boolean(), nullable=True)) - op.add_column('discord_engagement', sa.Column('apprenticebadge', sa.Boolean(), nullable=True)) - op.add_column('discord_engagement', sa.Column('rockstarbadge', sa.Boolean(), nullable=True)) - op.add_column('discord_engagement', sa.Column('enthusiastbadge', sa.Boolean(), nullable=True)) - op.add_column('discord_engagement', sa.Column('risingstarbadge', sa.Boolean(), nullable=True)) - op.alter_column('discord_engagement', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('discord_engagement', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.drop_column('discord_engagement', 'apprenticeBadge') - op.drop_column('discord_engagement', 'converserBadge') - op.drop_column('discord_engagement', 'risingStarBadge') - op.drop_column('discord_engagement', 'enthusiastBadge') - op.drop_column('discord_engagement', 'rockstarBadge') - op.alter_column('dmp_issue_updates', 'created_at', - 
existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('dmp_issue_updates', 'comment_updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.drop_constraint('dmp_issue_updates_comment_id_key', 'dmp_issue_updates', type_='unique') - op.drop_constraint('dmp_issue_updates_dmp_id_fkey', 'dmp_issue_updates', type_='foreignkey') - op.create_foreign_key(None, 'dmp_issue_updates', 'dmp_issues', ['dmp_id'], ['id']) - op.alter_column('dmp_issues', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.drop_constraint('dmp_issues_dmp_id_key', 'dmp_issues', type_='unique') - op.drop_constraint('dmp_issues_org_id_fkey', 'dmp_issues', type_='foreignkey') - op.create_foreign_key(None, 'dmp_issues', 'dmp_orgs', ['org_id'], ['id']) - op.drop_column('dmp_issues', 'repo_owner') - op.add_column('dmp_orgs', sa.Column('version', sa.Text(), nullable=True)) - op.alter_column('dmp_orgs', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('dmp_orgs', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.drop_constraint('dmp_orgs_id_key', 'dmp_orgs', type_='unique') - op.alter_column('dmp_pr_updates', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('dmp_pr_updates', 'pr_updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('dmp_pr_updates', 'merged_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('dmp_pr_updates', 'closed_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.drop_constraint('dmp_pr_updates_pr_id_key', 'dmp_pr_updates', type_='unique') - op.drop_constraint('dmp_pr_updates_dmp_id_fkey', 'dmp_pr_updates', type_='foreignkey') - op.create_foreign_key(None, 'dmp_pr_updates', 'dmp_issues', ['dmp_id'], ['id']) - op.alter_column('dmp_tickets', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('dmp_tickets', 'ticket_points', - existing_type=sa.SMALLINT(), - type_=sa.Integer(), - existing_comment='How many points the ticket is worth', - existing_nullable=True, - existing_server_default=sa.text("'0'::smallint")) - op.alter_column('dmp_tickets', 'index', - existing_type=sa.SMALLINT(), - server_default=None, - type_=sa.Integer(), - existing_nullable=False, - autoincrement=True) - op.alter_column('dmp_week_updates', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.drop_constraint('dmp_week_updates_dmp_id_fkey', 'dmp_week_updates', type_='foreignkey') - op.alter_column('github_classroom_data', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('github_classroom_data', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - 
type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('github_classroom_data', 'submission_timestamp', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False) - op.alter_column('github_classroom_data', 'points_awarded', - existing_type=sa.VARCHAR(), - type_=sa.Integer(), - existing_nullable=True) - op.alter_column('github_classroom_data', 'points_available', - existing_type=sa.VARCHAR(), - type_=sa.Integer(), - existing_nullable=True) - op.create_table_comment( - 'github_classroom_data', - 'Table for saving the details about github classroom assignment data', - existing_comment='Table for save the details about github classroom assignment datas', - schema=None - ) - op.alter_column('github_installations', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('github_installations', 'github_ids', - existing_type=postgresql.JSON(astext_type=sa.Text()), - type_=sa.Text(), - comment="Identifiers on the github database, prolly won't be used", - existing_comment="identifiers on the github database, prolly won't be used", - existing_nullable=True) - op.alter_column('github_installations', 'permissions_and_events', - existing_type=postgresql.JSON(astext_type=sa.Text()), - type_=sa.Text(), - existing_nullable=True) - op.alter_column('github_installations', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.drop_constraint('github_installations_organisation_fkey', 'github_installations', type_='foreignkey') - op.create_foreign_key(None, 'github_installations', 'community_orgs', ['organisation'], ['name']) - op.alter_column('github_organisations_to_organisations', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('github_organisations_to_organisations', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - comment='Creation date of organization ticket', - existing_comment='creation date of organization ticket', - existing_nullable=True) - op.alter_column('issue_contributors', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('issue_contributors', 'contributor_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - nullable=True) - op.alter_column('issue_contributors', 'issue_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('issue_contributors', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('issue_contributors', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.drop_constraint('unique_issue_id_contributors', 'issue_contributors', type_='unique') - op.drop_constraint('issue_contributors_contributor_id_fkey', 'issue_contributors', type_='foreignkey') - op.create_foreign_key(None, 'issue_contributors', 'contributors_registration', ['contributor_id'], ['id']) - op.create_foreign_key(None, 'issue_contributors', 'role_master', ['role'], ['id']) - op.alter_column('issue_mentors', 'id', - 
existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)")) - op.alter_column('issue_mentors', 'issue_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - nullable=True) - op.alter_column('issue_mentors', 'angel_mentor_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=True) - op.drop_constraint('unique_issue_id_mentors', 'issue_mentors', type_='unique') - op.alter_column('issues', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('issues_id_seq'::regclass)")) - op.drop_constraint('issues_org_id_fkey', 'issues', type_='foreignkey') - op.create_foreign_key(None, 'issues', 'community_orgs', ['org_id'], ['id']) - op.alter_column('mentor_details', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('mentor_details_id_seq'::regclass)")) - op.alter_column('mentor_not_added', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.add_column('mentorship_program_site_structure', sa.Column('product_id', sa.BigInteger(), nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('project_id', sa.BigInteger(), nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('contributor_id', sa.BigInteger(), nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('website_directory_label', sa.Text(), nullable=True)) - op.alter_column('mentorship_program_site_structure', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.drop_constraint('mentorship_program_site_structure_project_fkey', 'mentorship_program_site_structure', type_='foreignkey') - op.drop_constraint('mentorship_program_site_structure_product_fkey', 'mentorship_program_site_structure', type_='foreignkey') - op.drop_constraint('mentorship_program_site_structure_contributor_fkey', 'mentorship_program_site_structure', type_='foreignkey') - op.create_foreign_key(None, 'mentorship_program_site_structure', 'product', ['product_id'], ['id']) - op.drop_table_comment( - 'mentorship_program_site_structure', - existing_comment='a mapping for the milestones website structure', - schema=None - ) - op.drop_column('mentorship_program_site_structure', 'project') - op.drop_column('mentorship_program_site_structure', 'product') - op.drop_column('mentorship_program_site_structure', 'website directory_label') - op.drop_column('mentorship_program_site_structure', 'contributor') - op.alter_column('mentorship_program_website_comments', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_comments', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_commits', 'date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_commits', 'files', - existing_type=postgresql.JSON(astext_type=sa.Text()), - type_=sa.Text(), - existing_nullable=True) - op.add_column('mentorship_program_website_has_updated', 
sa.Column('project_id', sa.BigInteger(), nullable=True)) - op.alter_column('mentorship_program_website_has_updated', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('mentorship_program_website_has_updated', 'week1_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week2_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week3_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week4_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week5_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week6_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week7_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week8_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week9_update_date', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.drop_constraint('mentorship_program_website_has_updated_project_fkey', 'mentorship_program_website_has_updated', type_='foreignkey') - op.drop_column('mentorship_program_website_has_updated', 'project') - op.alter_column('point_system', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('point_transactions', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('point_transactions', 'user_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=True) - op.alter_column('point_transactions', 'issue_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('point_transactions', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('point_transactions', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - nullable=False, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('point_transactions', 'angel_mentor_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=True) - op.drop_constraint('point_transactions_user_id_fkey', 'point_transactions', type_='foreignkey') - op.create_foreign_key(None, 'point_transactions', 'mentor_details', ['angel_mentor_id'], ['id']) - op.create_foreign_key(None, 'point_transactions', 'contributors_registration', ['user_id'], 
['id']) - op.alter_column('points_mapping', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('points_mapping', 'role', - existing_type=sa.TEXT(), - type_=sa.String(length=50), - nullable=False) - op.alter_column('points_mapping', 'complexity', - existing_type=sa.TEXT(), - type_=sa.String(length=50), - nullable=False) - op.alter_column('points_mapping', 'points', - existing_type=sa.INTEGER(), - nullable=False) - op.alter_column('points_mapping', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('points_mapping', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('pr_history', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('pr_history_id_seq'::regclass)")) - op.alter_column('pr_history', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('pr_history', 'raised_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False) - op.alter_column('pr_history', 'pr_id', - existing_type=sa.BIGINT(), - comment=None, - existing_comment='github id of the pr', - existing_nullable=False) - op.drop_table_comment( - 'pr_history', - existing_comment='Holds records of pr webhooks', - schema=None - ) - op.drop_column('pr_history', 'points') - op.alter_column('pr_staging', 'id', - existing_type=sa.UUID(), - type_=sa.String(length=36), - existing_nullable=False, - existing_server_default=sa.text('gen_random_uuid()')) - op.alter_column('pr_staging', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('pr_staging', 'raised_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False) - op.alter_column('pr_staging', 'pr_id', - existing_type=sa.BIGINT(), - comment=None, - existing_comment='github id of the pr', - existing_nullable=False) - op.drop_table_comment( - 'pr_staging', - existing_comment='This is a duplicate of connected_prs', - schema=None - ) - op.add_column('product', sa.Column('channel_id', sa.BigInteger(), nullable=True)) - op.alter_column('product', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('product', 'description', - existing_type=sa.TEXT(), - comment=None, - existing_comment='URL to the product entry on C4GT wiki', - existing_nullable=True, - existing_server_default=sa.text("''::text")) - op.drop_constraint('product_channel_fkey', 'product', type_='foreignkey') - op.create_foreign_key(None, 'product', 'discord_channels', ['channel_id'], ['channel_id']) - op.drop_table_comment( - 'product', - existing_comment="A table containing all 'Products' in C4GT 2023", - schema=None - ) - op.drop_column('product', 'channel') - op.alter_column('ticket_comments', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - 
op.alter_column('ticket_comments', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True) - op.alter_column('unlisted_tickets', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('unlisted_tickets', 'ticket_points', - existing_type=sa.SMALLINT(), - comment=None, - existing_comment='How many points the ticket is worth', - existing_nullable=True, - existing_server_default=sa.text("'0'::smallint")) - op.alter_column('unlisted_tickets', 'index', - existing_type=sa.SMALLINT(), - server_default=None, - existing_nullable=False) - op.alter_column('unlisted_tickets', 'uuid', - existing_type=sa.UUID(), - type_=sa.String(length=36), - existing_nullable=False, - existing_server_default=sa.text('gen_random_uuid()')) - op.create_unique_constraint(None, 'unlisted_tickets', ['uuid', 'issue_id']) - op.add_column('user_activity', sa.Column('contributor_id', sa.BigInteger(), nullable=False)) - op.alter_column('user_activity', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True) - op.alter_column('user_activity', 'issue_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('user_activity', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_activity', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_activity', 'mentor_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=True) - op.drop_constraint('user_activity_user_id_fkey', 'user_activity', type_='foreignkey') - op.drop_constraint('user_activity_mentor_id_fkey', 'user_activity', type_='foreignkey') - op.create_foreign_key(None, 'user_activity', 'contributors_registration', ['contributor_id'], ['id']) - op.create_foreign_key(None, 'user_activity', 'mentor_details', ['mentor_id'], ['id']) - op.drop_column('user_activity', 'user_id') - op.alter_column('user_badges', 'id', - existing_type=sa.INTEGER(), - type_=sa.UUID(), - existing_nullable=False) - op.alter_column('user_badges', 'user_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('user_badges', 'badge_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('user_badges', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_badges', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_certificates', 'id', - existing_type=sa.INTEGER(), - type_=sa.UUID(), - existing_nullable=False) - op.alter_column('user_certificates', 'user_id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False) - op.alter_column('user_certificates', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - 
existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_certificates', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('users', 'id', - existing_type=sa.INTEGER(), - type_=sa.BigInteger(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('users_id_seq'::regclass)")) - op.alter_column('users', 'name', - existing_type=sa.TEXT(), - nullable=True) - op.alter_column('users', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('users', 'updated_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.create_unique_constraint(None, 'users', ['discord']) - op.alter_column('vc_logs', 'id', - existing_type=sa.BIGINT(), - server_default=None, - existing_nullable=False, - autoincrement=True) - op.alter_column('vc_logs', 'created_at', - existing_type=postgresql.TIMESTAMP(timezone=True), - type_=db.models.DateTime(), - existing_nullable=False, - existing_server_default=sa.text('now()')) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('vc_logs', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False, - existing_server_default=sa.text('now()')) - op.alter_column('vc_logs', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_constraint(None, 'users', type_='unique') - op.alter_column('users', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('users', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('users', 'name', - existing_type=sa.TEXT(), - nullable=False) - op.alter_column('users', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('users_id_seq'::regclass)")) - op.alter_column('user_certificates', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_certificates', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_certificates', 'user_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_certificates', 'id', - existing_type=sa.UUID(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_badges', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - 
existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_badges', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_badges', 'badge_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_badges', 'user_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_badges', 'id', - existing_type=sa.UUID(), - type_=sa.INTEGER(), - existing_nullable=False) - op.add_column('user_activity', sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False)) - op.drop_constraint(None, 'user_activity', type_='foreignkey') - op.drop_constraint(None, 'user_activity', type_='foreignkey') - op.create_foreign_key('user_activity_mentor_id_fkey', 'user_activity', 'mentor_details', ['mentor_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - op.create_foreign_key('user_activity_user_id_fkey', 'user_activity', 'users', ['user_id'], ['id']) - op.alter_column('user_activity', 'mentor_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=True) - op.alter_column('user_activity', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_activity', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('user_activity', 'issue_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('user_activity', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.drop_column('user_activity', 'contributor_id') - op.drop_constraint(None, 'unlisted_tickets', type_='unique') - op.alter_column('unlisted_tickets', 'uuid', - existing_type=sa.String(length=36), - type_=sa.UUID(), - existing_nullable=False, - existing_server_default=sa.text('gen_random_uuid()')) - op.alter_column('unlisted_tickets', 'index', - existing_type=sa.SMALLINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1), - existing_nullable=False) - op.alter_column('unlisted_tickets', 'ticket_points', - existing_type=sa.SMALLINT(), - comment='How many points the ticket is worth', - existing_nullable=True, - existing_server_default=sa.text("'0'::smallint")) - op.alter_column('unlisted_tickets', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('ticket_comments', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('ticket_comments', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.add_column('product', sa.Column('channel', sa.BIGINT(), autoincrement=False, nullable=True)) - op.create_table_comment( - 'product', - "A table containing all 'Products' in C4GT 2023", - existing_comment=None, - schema=None - ) - op.drop_constraint(None, 'product', type_='foreignkey') - op.create_foreign_key('product_channel_fkey', 
'product', 'discord_channels', ['channel'], ['channel_id']) - op.alter_column('product', 'description', - existing_type=sa.TEXT(), - comment='URL to the product entry on C4GT wiki', - existing_nullable=True, - existing_server_default=sa.text("''::text")) - op.alter_column('product', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_column('product', 'channel_id') - op.create_table_comment( - 'pr_staging', - 'This is a duplicate of connected_prs', - existing_comment=None, - schema=None - ) - op.alter_column('pr_staging', 'pr_id', - existing_type=sa.BIGINT(), - comment='github id of the pr', - existing_nullable=False) - op.alter_column('pr_staging', 'raised_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False) - op.alter_column('pr_staging', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('pr_staging', 'id', - existing_type=sa.String(length=36), - type_=sa.UUID(), - existing_nullable=False, - existing_server_default=sa.text('gen_random_uuid()')) - op.add_column('pr_history', sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False)) - op.create_table_comment( - 'pr_history', - 'Holds records of pr webhooks', - existing_comment=None, - schema=None - ) - op.alter_column('pr_history', 'pr_id', - existing_type=sa.BIGINT(), - comment='github id of the pr', - existing_nullable=False) - op.alter_column('pr_history', 'raised_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=False) - op.alter_column('pr_history', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('now()')) - op.alter_column('pr_history', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('pr_history_id_seq'::regclass)")) - op.alter_column('points_mapping', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('points_mapping', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('points_mapping', 'points', - existing_type=sa.INTEGER(), - nullable=True) - op.alter_column('points_mapping', 'complexity', - existing_type=sa.String(length=50), - type_=sa.TEXT(), - nullable=True) - op.alter_column('points_mapping', 'role', - existing_type=sa.String(length=50), - type_=sa.TEXT(), - nullable=True) - op.alter_column('points_mapping', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.drop_constraint(None, 'point_transactions', type_='foreignkey') - op.drop_constraint(None, 'point_transactions', type_='foreignkey') - op.create_foreign_key('point_transactions_user_id_fkey', 'point_transactions', 'contributors_registration', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') - 
op.alter_column('point_transactions', 'angel_mentor_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=True) - op.alter_column('point_transactions', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('point_transactions', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - nullable=True, - existing_server_default=sa.text('CURRENT_TIMESTAMP')) - op.alter_column('point_transactions', 'issue_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False) - op.alter_column('point_transactions', 'user_id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=True) - op.alter_column('point_transactions', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True) - op.alter_column('point_system', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.add_column('mentorship_program_website_has_updated', sa.Column('project', sa.TEXT(), autoincrement=False, nullable=True)) - op.create_foreign_key('mentorship_program_website_has_updated_project_fkey', 'mentorship_program_website_has_updated', '__mentorship_program_projects', ['project'], ['name']) - op.alter_column('mentorship_program_website_has_updated', 'week9_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week8_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week7_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week6_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week5_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week4_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week3_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week2_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'week1_update_date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_has_updated', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - 
op.drop_column('mentorship_program_website_has_updated', 'project_id') - op.alter_column('mentorship_program_website_commits', 'files', - existing_type=sa.Text(), - type_=postgresql.JSON(astext_type=sa.Text()), - existing_nullable=True) - op.alter_column('mentorship_program_website_commits', 'date', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_comments', 'updated_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.alter_column('mentorship_program_website_comments', 'created_at', - existing_type=db.models.DateTime(), - type_=postgresql.TIMESTAMP(timezone=True), - existing_nullable=True) - op.add_column('mentorship_program_site_structure', sa.Column('contributor', sa.TEXT(), autoincrement=False, nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('website directory_label', sa.TEXT(), autoincrement=False, nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True)) - op.add_column('mentorship_program_site_structure', sa.Column('project', sa.TEXT(), autoincrement=False, nullable=True)) - op.create_table_comment( - 'mentorship_program_site_structure', - 'a mapping for the milestones website structure', - existing_comment=None, - schema=None - ) - op.drop_constraint(None, 'mentorship_program_site_structure', type_='foreignkey') - op.create_foreign_key('mentorship_program_site_structure_contributor_fkey', 'mentorship_program_site_structure', '__mentorship_program_selected_contributors', ['contributor'], ['name']) - op.create_foreign_key('mentorship_program_site_structure_product_fkey', 'mentorship_program_site_structure', 'product', ['product'], ['name']) - op.create_foreign_key('mentorship_program_site_structure_project_fkey', 'mentorship_program_site_structure', '__mentorship_program_projects', ['project'], ['name']) - op.alter_column('mentorship_program_site_structure', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.drop_column('mentorship_program_site_structure', 'website_directory_label') - op.drop_column('mentorship_program_site_structure', 'contributor_id') - op.drop_column('mentorship_program_site_structure', 'project_id') - op.drop_column('mentorship_program_site_structure', 'product_id') - op.alter_column('mentor_not_added', 'id', - existing_type=sa.BIGINT(), - server_default=sa.Identity(always=True, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), - existing_nullable=False, - autoincrement=True) - op.alter_column('mentor_details', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('mentor_details_id_seq'::regclass)")) - op.drop_constraint(None, 'issues', type_='foreignkey') - op.create_foreign_key('issues_org_id_fkey', 'issues', 'community_orgs', ['org_id'], ['id'], onupdate='CASCADE', ondelete='SET NULL') - op.alter_column('issues', 'id', - existing_type=sa.BigInteger(), - type_=sa.INTEGER(), - existing_nullable=False, - autoincrement=True, - existing_server_default=sa.text("nextval('issues_id_seq'::regclass)")) - op.create_unique_constraint('unique_issue_id_mentors', 'issue_mentors', ['issue_id']) - 
-    op.alter_column('issue_mentors', 'angel_mentor_id',
-               existing_type=sa.BigInteger(),
-               type_=sa.INTEGER(),
-               existing_nullable=True)
-    op.alter_column('issue_mentors', 'issue_id',
-               existing_type=sa.BigInteger(),
-               type_=sa.INTEGER(),
-               nullable=False)
-    op.alter_column('issue_mentors', 'id',
-               existing_type=sa.BigInteger(),
-               type_=sa.INTEGER(),
-               existing_nullable=False,
-               autoincrement=True,
-               existing_server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)"))
-    op.drop_constraint(None, 'issue_contributors', type_='foreignkey')
-    op.drop_constraint(None, 'issue_contributors', type_='foreignkey')
-    op.create_foreign_key('issue_contributors_contributor_id_fkey', 'issue_contributors', 'contributors_registration', ['contributor_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
-    op.create_unique_constraint('unique_issue_id_contributors', 'issue_contributors', ['issue_id'])
-    op.alter_column('issue_contributors', 'updated_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True,
-               existing_server_default=sa.text('CURRENT_TIMESTAMP'))
-    op.alter_column('issue_contributors', 'created_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True,
-               existing_server_default=sa.text('CURRENT_TIMESTAMP'))
-    op.alter_column('issue_contributors', 'issue_id',
-               existing_type=sa.BigInteger(),
-               type_=sa.INTEGER(),
-               existing_nullable=False)
-    op.alter_column('issue_contributors', 'contributor_id',
-               existing_type=sa.BigInteger(),
-               type_=sa.INTEGER(),
-               nullable=False)
-    op.alter_column('issue_contributors', 'id',
-               existing_type=sa.BigInteger(),
-               type_=sa.INTEGER(),
-               existing_nullable=False,
-               autoincrement=True)
-    op.alter_column('github_organisations_to_organisations', 'created_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               comment='creation date of organization ticket',
-               existing_comment='Creation date of organization ticket',
-               existing_nullable=True)
-    op.alter_column('github_organisations_to_organisations', 'id',
-               existing_type=sa.BIGINT(),
-               server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
-               existing_nullable=False,
-               autoincrement=True)
-    op.drop_constraint(None, 'github_installations', type_='foreignkey')
-    op.create_foreign_key('github_installations_organisation_fkey', 'github_installations', '__community_organisations', ['organisation'], ['name'], onupdate='CASCADE')
-    op.alter_column('github_installations', 'created_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True)
-    op.alter_column('github_installations', 'permissions_and_events',
-               existing_type=sa.Text(),
-               type_=postgresql.JSON(astext_type=sa.Text()),
-               existing_nullable=True)
-    op.alter_column('github_installations', 'github_ids',
-               existing_type=sa.Text(),
-               type_=postgresql.JSON(astext_type=sa.Text()),
-               comment="identifiers on the github database, prolly won't be used",
-               existing_comment="Identifiers on the github database, prolly won't be used",
-               existing_nullable=True)
-    op.alter_column('github_installations', 'id',
-               existing_type=sa.BIGINT(),
-               server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
-               existing_nullable=False,
-               autoincrement=True)
-    op.create_table_comment(
-        'github_classroom_data',
-        'Table for save the details about github classroom assignment datas',
-        existing_comment='Table for saving the details about github classroom assignment data',
-        schema=None
-    )
-    op.alter_column('github_classroom_data', 'points_available',
-               existing_type=sa.Integer(),
-               type_=sa.VARCHAR(),
-               existing_nullable=True)
-    op.alter_column('github_classroom_data', 'points_awarded',
-               existing_type=sa.Integer(),
-               type_=sa.VARCHAR(),
-               existing_nullable=True)
-    op.alter_column('github_classroom_data', 'submission_timestamp',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=False)
-    op.alter_column('github_classroom_data', 'created_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=False,
-               existing_server_default=sa.text('now()'))
-    op.alter_column('github_classroom_data', 'id',
-               existing_type=sa.BIGINT(),
-               server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
-               existing_nullable=False,
-               autoincrement=True)
-    op.create_foreign_key('dmp_week_updates_dmp_id_fkey', 'dmp_week_updates', 'dmp_issues', ['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
-    op.alter_column('dmp_week_updates', 'id',
-               existing_type=sa.BIGINT(),
-               server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
-               existing_nullable=False,
-               autoincrement=True)
-    op.alter_column('dmp_tickets', 'index',
-               existing_type=sa.Integer(),
-               server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1),
-               type_=sa.SMALLINT(),
-               existing_nullable=False,
-               autoincrement=True)
-    op.alter_column('dmp_tickets', 'ticket_points',
-               existing_type=sa.Integer(),
-               type_=sa.SMALLINT(),
-               existing_comment='How many points the ticket is worth',
-               existing_nullable=True,
-               existing_server_default=sa.text("'0'::smallint"))
-    op.alter_column('dmp_tickets', 'created_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True,
-               existing_server_default=sa.text('now()'))
-    op.drop_constraint(None, 'dmp_pr_updates', type_='foreignkey')
-    op.create_foreign_key('dmp_pr_updates_dmp_id_fkey', 'dmp_pr_updates', 'dmp_issues', ['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
-    op.create_unique_constraint('dmp_pr_updates_pr_id_key', 'dmp_pr_updates', ['pr_id'])
-    op.alter_column('dmp_pr_updates', 'closed_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True)
-    op.alter_column('dmp_pr_updates', 'merged_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True)
-    op.alter_column('dmp_pr_updates', 'pr_updated_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True)
-    op.alter_column('dmp_pr_updates', 'created_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=False,
-               existing_server_default=sa.text('now()'))
-    op.create_unique_constraint('dmp_orgs_id_key', 'dmp_orgs', ['id'])
-    op.alter_column('dmp_orgs', 'created_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=False,
-               existing_server_default=sa.text('now()'))
-    op.alter_column('dmp_orgs', 'id',
-               existing_type=sa.BIGINT(),
-               server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
-               existing_nullable=False,
-               autoincrement=True)
-    op.drop_column('dmp_orgs', 'version')
-    op.add_column('dmp_issues', sa.Column('repo_owner', sa.TEXT(), autoincrement=False, nullable=True))
-    op.drop_constraint(None, 'dmp_issues', type_='foreignkey')
-    op.create_foreign_key('dmp_issues_org_id_fkey', 'dmp_issues', 'dmp_orgs', ['org_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
-    op.create_unique_constraint('dmp_issues_dmp_id_key', 'dmp_issues', ['id'])
-    op.alter_column('dmp_issues', 'id',
-               existing_type=sa.BIGINT(),
-               server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
-               existing_nullable=False,
-               autoincrement=True)
-    op.drop_constraint(None, 'dmp_issue_updates', type_='foreignkey')
-    op.create_foreign_key('dmp_issue_updates_dmp_id_fkey', 'dmp_issue_updates', 'dmp_issues', ['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
-    op.create_unique_constraint('dmp_issue_updates_comment_id_key', 'dmp_issue_updates', ['comment_id'])
-    op.alter_column('dmp_issue_updates', 'comment_updated_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True)
-    op.alter_column('dmp_issue_updates', 'created_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=False,
-               existing_server_default=sa.text('now()'))
-    op.add_column('discord_engagement', sa.Column('rockstarBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
-    op.add_column('discord_engagement', sa.Column('enthusiastBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
-    op.add_column('discord_engagement', sa.Column('risingStarBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
-    op.add_column('discord_engagement', sa.Column('converserBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
-    op.add_column('discord_engagement', sa.Column('apprenticeBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
-    op.alter_column('discord_engagement', 'created_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True,
-               existing_server_default=sa.text('now()'))
-    op.alter_column('discord_engagement', 'id',
-               existing_type=sa.BIGINT(),
-               server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
-               existing_nullable=False,
-               autoincrement=True)
-    op.drop_column('discord_engagement', 'risingstarbadge')
-    op.drop_column('discord_engagement', 'enthusiastbadge')
-    op.drop_column('discord_engagement', 'rockstarbadge')
-    op.drop_column('discord_engagement', 'apprenticebadge')
-    op.drop_column('discord_engagement', 'converserbadge')
-    op.create_table_comment(
-        'contributors_registration',
-        'This is a duplicate of contributors_registration_old',
-        existing_comment=None,
-        schema=None
-    )
-    op.alter_column('contributors_registration', 'joined_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=False,
-               existing_server_default=sa.text('now()'))
-    op.alter_column('contributors_registration', 'id',
-               existing_type=sa.BIGINT(),
-               server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
-               existing_nullable=False,
-               autoincrement=True)
-    op.add_column('contributors_discord', sa.Column('experience', sa.TEXT(), autoincrement=False, nullable=True))
-    op.add_column('contributors_discord', sa.Column('country', sa.TEXT(), autoincrement=False, nullable=True))
-    op.add_column('contributors_discord', sa.Column('city', sa.TEXT(), autoincrement=False, nullable=True))
-    op.alter_column('contributors_discord', 'joined_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=False,
-               existing_server_default=sa.text('now()'))
-    op.alter_column('contributors_discord', 'id',
-               existing_type=sa.BIGINT(),
-               server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
-               existing_nullable=False,
-               autoincrement=True)
-    op.add_column('contributor_points', sa.Column('user_id', sa.BIGINT(), autoincrement=False, nullable=True))
-    op.drop_constraint(None, 'contributor_points', type_='foreignkey')
-    op.create_foreign_key('contributor_points_contributors_id_fkey', 'contributor_points', 'contributors_registration', ['user_id'], ['id'])
-    op.drop_column('contributor_points', 'contributors_id')
-    op.alter_column('contributor_names', 'id',
-               existing_type=sa.BIGINT(),
-               server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
-               existing_nullable=False,
-               autoincrement=True)
-    op.alter_column('connected_prs', 'merged_at',
-               existing_type=sa.Text(),
-               type_=postgresql.TIMESTAMP(),
-               existing_nullable=True)
-    op.alter_column('connected_prs', 'raised_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=False)
-    op.alter_column('connected_prs', 'created_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True,
-               existing_server_default=sa.text('now()'))
-    op.alter_column('community_orgs', 'name',
-               existing_type=sa.TEXT(),
-               nullable=False)
-    op.alter_column('chapters', 'created_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True)
-    op.alter_column('chapters', 'discord_role_id',
-               existing_type=sa.BIGINT(),
-               nullable=False,
-               comment='db od of the corresponding member role in discord server',
-               existing_comment='db id of the corresponding member role in discord server')
-    op.alter_column('chapters', 'org_name',
-               existing_type=sa.TEXT(),
-               nullable=False)
-    op.alter_column('ccbp_tickets', 'closed_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_comment='date-time at which issue was closed',
-               existing_nullable=True)
-    op.alter_column('ccbp_tickets', 'index',
-               existing_type=sa.SMALLINT(),
-               server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1),
-               existing_nullable=False,
-               autoincrement=True)
-    op.alter_column('ccbp_tickets', 'issue_id',
-               existing_type=sa.BIGINT(),
-               nullable=False)
-    op.alter_column('ccbp_tickets', 'created_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True,
-               existing_server_default=sa.text('now()'))
-    op.alter_column('badges', 'updated_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True,
-               existing_server_default=sa.text('CURRENT_TIMESTAMP'))
-    op.alter_column('badges', 'created_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True,
-               existing_server_default=sa.text('CURRENT_TIMESTAMP'))
-    op.alter_column('badges', 'id',
-               existing_type=sa.UUID(),
-               type_=sa.INTEGER(),
-               existing_nullable=False)
-    op.alter_column('app_comments', 'issue_id',
-               existing_type=sa.BIGINT(),
-               nullable=False)
-    op.alter_column('app_comments', 'updated_at',
-               existing_type=db.models.DateTime(),
-               type_=postgresql.TIMESTAMP(timezone=True),
-               existing_nullable=True,
-               existing_server_default=sa.text('now()'))
-    op.alter_column('app_comments', 'id',
-               existing_type=sa.BigInteger(),
-               type_=sa.UUID(),
-               existing_nullable=False,
-               autoincrement=True,
-               existing_server_default=sa.text('gen_random_uuid()'))
-    op.create_table('unstructured discord data',
-    sa.Column('text', sa.VARCHAR(), autoincrement=False, nullable=True),
-    sa.Column('author', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('channel', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('channel_name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('uuid', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
-    sa.Column('author_name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('author_roles', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
-    sa.Column('sent_at', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('uuid', name='unstructured discord data_duplicate_pkey'),
-    sa.UniqueConstraint('uuid', name='unstructured discord data_duplicate_uuid_key')
-    )
-    op.create_table('__community_program_product_wise_tickets',
-    sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
-    sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('gh_organisation', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('repository name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='community_program_tickets_duplicate_pkey')
-    )
-    op.create_table('__mentorship_program_selected_contributors',
-    sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
-    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('project_name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='mentorship_program_selected_contributors_pkey'),
-    sa.UniqueConstraint('name', name='mentorship_program_selected_contributors_name_key'),
-    comment='List of contributors selected for C4GT Mentorship Program 2023'
-    )
-    op.create_table('__community_organisations',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='organisations_pkey'),
-    sa.UniqueConstraint('name', name='organisations_name_key'),
-    postgresql_ignore_search_path=False
-    )
-    op.create_table('__community_program_tickets',
-    sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='community_program_tickets_pkey')
-    )
-    op.create_table('__pull_requests',
-    sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
-    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
-    sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('raised_by', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('raised_at', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('is_merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('merged_by', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('merged_at', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='github id of the pr'),
-    sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False),
-    sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='pull_requests_pkey1'),
-    sa.UniqueConstraint('html_url', name='pull_requests_html_url_key'),
-    sa.UniqueConstraint('pr_id', name='pull_requests_pr_id_key')
-    )
-    op.create_table('contributors_registration_old',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=False),
-    sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True),
-    sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False),
-    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='contributors_registration_duplicate_pkey'),
-    sa.UniqueConstraint('discord_id', name='contributors_registration_duplicate_discord_id_key'),
-    sa.UniqueConstraint('github_id', name='contributors_registration_duplicate_github_id_key')
-    )
-    op.create_table('__dev_onboarding',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=False),
-    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('repos', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='Onboarding_Dev_pkey'),
-    sa.UniqueConstraint('organisation', name='Onboarding_Dev_org_key')
-    )
-    op.create_table('__comments',
-    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='comments_pkey')
-    )
-    op.create_table('__mentorship_program_projects',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('repository', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('issue_page_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('isssue_api_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('repository_api_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.ForeignKeyConstraint(['product'], ['product.name'], name='__mentorship_program_projects_product_fkey', ondelete='SET DEFAULT'),
-    sa.PrimaryKeyConstraint('id', name='projects_pkey'),
-    sa.UniqueConstraint('name', name='projects_name_key'),
-    comment='Selected projects under C4GT 2023'
-    )
-    op.create_table('__dashboard_config',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('dashboard', sa.TEXT(), autoincrement=False, nullable=False),
-    sa.Column('starting date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='dashboard _config_pkey')
-    )
-    op.create_table('__applicant',
-    sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
-    sa.Column('sheet_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='applicant_pkey'),
-    sa.UniqueConstraint('discord_id', name='applicant_discord_id_key')
-    )
-    op.create_table('__contributors_discord',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True),
-    sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True),
-    sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False),
-    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column(' name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('chapter', sa.TEXT(), autoincrement=False, nullable=True, comment="the chapter they're associated with"),
-    sa.Column('gender', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='__contributors_pkey'),
-    sa.UniqueConstraint('discord_id', name='__contributors_discord_id_key')
-    )
-    op.create_table('__community_program_unique_user_data',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('ticket_name', sa.TEXT(), autoincrement=False, nullable=False),
-    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('linked_pr', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('linked_pr_author_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('linked_pr_author_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('is_registered', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('ticket_link', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('linked_pr_link', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('state', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='community_program_unique_user_data_pkey')
-    )
-    op.create_table('__mentorship_program_tickets',
-    sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
-    sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='mentorship_program_tickets_pkey')
-    )
-    op.create_table('__mentorship_program_pull_request',
-    sa.Column('pr_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('pr_id', sa.INTEGER(), autoincrement=False, nullable=False),
-    sa.Column('pr_node_id', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('title', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('raised_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('body', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('merged_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
-    sa.Column('assignees', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
-    sa.Column('requested_reviewers', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
-    sa.Column('labels', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
-    sa.Column('review_comments_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('comments_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('repository_id', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('repository_owner_name', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('repository_owner_id', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('repository_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
-    sa.Column('number_of_commits', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('lines_of_code_added', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('lines_of_code_removed', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('number_of_files_changed', sa.INTEGER(), autoincrement=False, nullable=True),
-    sa.Column('merged_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('linked_ticket', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('pr_id', name='mentorship_program_pull_request_pkey')
-    )
-    op.create_table('__mentorship_program_ticket_comments',
-    sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
-    sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('id', name='mentorship_program_ticket_comments_pkey')
-    )
-    op.create_table('__mentors',
-    sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
-    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True),
-    sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True),
-    sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True),
-    sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('name', sa.TEXT(), autoincrement=False, nullable=False),
-    sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.ForeignKeyConstraint(['organisation'], ['__community_organisations.name'], name='__mentors_organisation_fkey'),
-    sa.PrimaryKeyConstraint('id', name='mentors_pkey')
-    )
-    op.create_table('__contributors_vc',
-    sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=False),
-    sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
-    sa.Column('certificate_link', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('stats', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.PrimaryKeyConstraint('github_username', name='contributors_vc_pkey')
-    )
-    op.drop_table('user_points_mapping')
-    op.drop_table('unstructured_discord_data')
-    op.drop_table('role_master')
-    op.drop_table('leaderboard')
-    op.drop_table('github_profile_data')
-    # ### end Alembic commands ###
diff --git a/shared_migrations/requirements.txt b/shared_migrations/requirements.txt
deleted file mode 100644
index 7b9da297149e49c45bcaa38cb17cd7493e90cebf..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 164
zcmYMuK?;K~5QX7?UFcCN(RSm)h5K%bCy2Jxg2tjE

Date: Fri, 3 Jan 2025 14:12:53 +0530
Subject: [PATCH 31/32] Readded Submodule

---
 .gitmodules       | 3 +++
 shared_migrations | 1 +
 2 files changed, 4 insertions(+)
 create mode 100644 .gitmodules
 create mode 160000 shared_migrations

diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..3e0df8d
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "shared_migrations"]
+	path = shared_migrations
+	url = https://github.com/Code4GovTech/shared-models-migrations.git
diff --git a/shared_migrations b/shared_migrations
new file mode 160000
index 0000000..a4d120e
--- /dev/null
+++ b/shared_migrations
@@ -0,0 +1 @@
+Subproject commit a4d120e23849673b439356ecaa75f56f368758e6

From 66f5833bb843eefa34770c7304321aea25c95e71 Mon Sep 17 00:00:00 2001
From: Shreyash
Date: Fri, 10 Jan 2025 13:51:42 +0530
Subject: [PATCH 32/32] Cleaned Comments

---
 models.py | 142 ------------------------------------------------------
 1 file changed, 142 deletions(-)

diff --git a/models.py b/models.py
index 757bc1c..e69de29 100644
--- a/models.py
+++ b/models.py
@@ -1,142 +0,0 @@
-# from datetime import datetime
-# from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, func,BigInteger
-# from sqlalchemy.orm import relationship
-# from sqlalchemy.ext.declarative import declarative_base
-
-
-# Base = declarative_base()
-
-
-
-# # Define your models
-# class DmpIssue(Base):
-#     __tablename__ = 'dmp_issues'
-
-#     id = Column(Integer, primary_key=True, autoincrement=True)
-#     issue_url = Column(String, nullable=False)
-#     issue_number = Column(Integer, nullable=False)
-#     mentor_username = Column(String, nullable=True)
-#     contributor_username = Column(String, nullable=True)
-#     title = Column(String, nullable=False)
-#     org_id = Column(Integer, ForeignKey('dmp_orgs.id'), nullable=False)
-#     description = Column(Text, nullable=True)
-#     repo = Column(String, nullable=True)
-
-#     def __repr__(self):
-#         return f""
-
-#     def to_dict(self):
-#         return {
-#             'id': self.id,
-#             'issue_url': self.issue_url,
-#             'issue_number': self.issue_number,
-#             'mentor_username': self.mentor_username,
-#             'contributor_username': self.contributor_username,
-#             'title': self.title,
-#             'org_id': self.org_id,
-#             'description': self.description,
-#             'repo': self.repo
-#         }
-
-# class DmpOrg(Base):
-#     __tablename__ = 'dmp_orgs'
-
-#     id = Column(Integer, primary_key=True, autoincrement=True)
-#     created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
-#     name = Column(String, nullable=False)
-#     description = Column(Text, nullable=True)
-#     link = Column(String, nullable=False)
-#     repo_owner = Column(String, nullable=False)
-#     dmp_issues = relationship('DmpIssue', backref='organization', lazy=True)
-
-#     def __repr__(self):
-#         return f""
-
-#     def to_dict(self):
-#         return {
-#             'id': self.id,
-#             'created_at': self.created_at.isoformat(),
-#             'name': self.name,
-#             'description': self.description,
-#             'link': self.link,
-#             'repo_owner': self.repo_owner
-#         }
-
-
-# class DmpIssueUpdate(Base):
-#     __tablename__ = 'dmp_issue_updates'
-
-#     created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
-#     body_text = Column(Text, nullable=False)
-#     comment_link = Column(String, nullable=False)
-#     comment_id = Column(BigInteger, primary_key=True, nullable=False)
-#     comment_api = Column(String, nullable=False)
-#     comment_updated_at = Column(DateTime, nullable=False)
-#     dmp_id = Column(Integer, ForeignKey('dmp_orgs.id'), nullable=False)
-#     created_by = Column(String, nullable=False)
-
-#     def __repr__(self):
-#         return f""
-
-#     def to_dict(self):
-#         return {
-#             'created_at': self.created_at.isoformat(),
-#             'body_text': self.body_text,
-#             'comment_link': self.comment_link,
-#             'comment_id': self.comment_id,
-#             'comment_api': self.comment_api,
-#             'comment_updated_at': self.comment_updated_at.isoformat(),
-#             'dmp_id': self.dmp_id,
-#             'created_by': self.created_by
-#         }
-
-# class Prupdates(Base):
-#     __tablename__ = 'dmp_pr_updates'
-
-#     created_at = Column(DateTime, nullable=False, default=datetime.utcnow)
-#     pr_id = Column(Integer, nullable=False, primary_key=True)
-#     status = Column(String, nullable=False)
-#     title = Column(String, nullable=False)
-#     pr_updated_at = Column(DateTime, nullable=False, default=datetime.utcnow)
-#     merged_at = Column(DateTime)
-#     closed_at = Column(DateTime)
-#     dmp_id = Column(Integer, ForeignKey('dmp_issues.id'), nullable=False)
-#     link = Column(String, nullable=False)
-
-#     def __repr__(self):
-#         return f''
-
-#     def to_dict(self):
-#         return {
-#             'created_at': self.created_at.isoformat(),
-#             'pr_id': self.pr_id,
-#             'status': self.status,
-#             'title': self.title,
-#             'pr_updated_at': self.pr_updated_at.isoformat(),
-#             'merged_at': self.merged_at.isoformat() if self.merged_at else None,
-#             'closed_at': self.closed_at.isoformat() if self.closed_at else None,
-#             'dmp_id': self.dmp_id,
-#             'link': self.link
-#         }
-
-# class DmpWeekUpdate(Base):
-#     __tablename__ = 'dmp_week_updates'
-
-#     id = Column(Integer, primary_key=True, autoincrement=True)
-#     issue_url = Column(String, nullable=False)
-#     week = Column(Integer, nullable=False)
-#     total_task = Column(Integer, nullable=False)
-#     completed_task = Column(Integer, nullable=False)
-#     progress = Column(Integer, nullable=False)
-#     task_data = Column(Text, nullable=False)
-#     dmp_id = Column(Integer, nullable=False)
-
-#     def __repr__(self):
-#         return f""
-
-#     def to_dict(self):
-#         return {
-#             'id': self.id,
-#             'week': self.week,
-#             'dmp_id': self.dmp_id,
-#         }