From f1bf7bc4a10347281923acd47c1350bdc450c422 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 12 Feb 2024 14:01:40 -0800 Subject: [PATCH 01/49] Create issues-to-csv.yml --- .github/workflows/issues-to-csv.yml | 32 +++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 .github/workflows/issues-to-csv.yml diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml new file mode 100644 index 0000000..69d3576 --- /dev/null +++ b/.github/workflows/issues-to-csv.yml @@ -0,0 +1,32 @@ +name: List Issues and Output as CSV + +on: + pull_request: + +jobs: + list-issues: + runs-on: ubuntu-latest + steps: + - name: List Issues for Repository + id: list_issues + run: | + echo "issue_id,issue_title,issue_state" > issues.csv + PAGE=1 + while :; do + RESPONSE=$(gh api repos/$ORGANIZATION/website/issues -f state=all -f per_page=100 -f page=$PAGE) + echo "$RESPONSE" | jq -r '.[] | "\(.id),\(.title),\(.state)"' >> issues.csv + COUNT=$(echo "$RESPONSE" | jq length) + if [ "$COUNT" -lt 100 ]; then + break + fi + ((PAGE++)) + done + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ORGANIZATION: 'hackforla' + + - name: Upload CSV as Artifact + uses: actions/upload-artifact@v2 + with: + name: issues.csv + path: issues.csv From 9fe15bc195b9a7191a491c1b1e4968cf85a12be9 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 12 Feb 2024 18:30:43 -0800 Subject: [PATCH 02/49] Update issues-to-csv.yml --- .github/workflows/issues-to-csv.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml index 69d3576..feaa217 100644 --- a/.github/workflows/issues-to-csv.yml +++ b/.github/workflows/issues-to-csv.yml @@ -1,7 +1,9 @@ name: List Issues and Output as CSV on: - pull_request: + issues: + types: [opened, transferred, assigned] + # pull_request: jobs: list-issues: From ea64d9223571a39d0c8353c71b530d55e59ed6f5 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 12 Feb 2024 18:33:45 -0800 Subject: [PATCH 03/49] Update issues-to-csv.yml --- .github/workflows/issues-to-csv.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml index feaa217..9a724ec 100644 --- a/.github/workflows/issues-to-csv.yml +++ b/.github/workflows/issues-to-csv.yml @@ -1,9 +1,10 @@ name: List Issues and Output as CSV on: + pull_request: issues: types: [opened, transferred, assigned] - # pull_request: + jobs: list-issues: From 26789e4b682602f92c84e2d0ed069c1dfeda0c21 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 12 Feb 2024 19:29:23 -0800 Subject: [PATCH 04/49] Add generate csv.yml --- .github/workflows/issues-to-csv.yml | 66 +++++++++++++++++++++-------- 1 file changed, 49 insertions(+), 17 deletions(-) diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml index 9a724ec..172f0a2 100644 --- a/.github/workflows/issues-to-csv.yml +++ b/.github/workflows/issues-to-csv.yml @@ -3,33 +3,65 @@ name: List Issues and Output as CSV on: pull_request: issues: - types: [opened, transferred, assigned] + type: [opened] jobs: + # Adds newly created issues onto project board in the default column 'New Issue Approval' + # unless overridden when issue has "LA website bot" in title, then 'Questions / In Review' list-issues: runs-on: ubuntu-latest 
+ if: ${{ github.event_name == 'issues' && github.event.action == 'opened' }} + # env: + # COLUMN_NAME: ${{ contains(github.event.issue.title, 'Hack for LA website bot') && 'Questions / In Review' || 'New Issue Approval' }} steps: - name: List Issues for Repository - id: list_issues + # id: list_issues + uses: alex-page/github-project-automation-plus@v0.9.0 + with: + project: Volunteer Dashboard + column: ${{ env.COLUMN_NAME }} + repo-token: ${{ secrets.HACKFORLA_BOT_PA_TOKEN }} + + # Checkout the repo to access the generate_csv.py script + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Generate issues CSV run: | - echo "issue_id,issue_title,issue_state" > issues.csv - PAGE=1 - while :; do - RESPONSE=$(gh api repos/$ORGANIZATION/website/issues -f state=all -f per_page=100 -f page=$PAGE) - echo "$RESPONSE" | jq -r '.[] | "\(.id),\(.title),\(.state)"' >> issues.csv - COUNT=$(echo "$RESPONSE" | jq length) - if [ "$COUNT" -lt 100 ]; then - break - fi - ((PAGE++)) - done + echo "Issue Number,Title,Labels,Projects" > issues.csv + curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ + "https://api.github.com/repos/${{ github.repository }}/issues?state=all&per_page=100" | \ + jq -r '.[] | select(.pull_request == null) | [.number, .title, (.labels | map(.name) | join(",")), (.projectCards | map(.project_url) | join(","))] | @csv' >> issues.csv env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - ORGANIZATION: 'hackforla' - - name: Upload CSV as Artifact - uses: actions/upload-artifact@v2 + - name: Upload CSV as artifact + uses: actions/upload-artifact@v3 with: - name: issues.csv + name: issues-csv path: issues.csv + + # # Setup Python environment + # - name: Set up Python + # uses: actions/setup-python@v4 + # with: + # python-version: '3.8' + + # # Install Python dependencies (if any) + # - name: Install dependencies + # run: pip install -r requirements.txt + + # # Run the script to generate the CSV file + # - name: Generate CSV of issues + # run: python generate_csv.py + # env: + # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # OUTPUT_FILENAME: issues.csv + + # # Upload issues.csv as an artifact + # - name: Upload CSV as artifact + # uses: actions/upload-artifact@v3 + # with: + # name: issues-csv + # path: issues.csv From 3368204ac95848d398c7189e0da2a32407d42213 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 12 Feb 2024 19:53:14 -0800 Subject: [PATCH 05/49] Removed if statement.yml Workflow was being skipped --- .github/workflows/issues-to-csv.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml index 172f0a2..60cf090 100644 --- a/.github/workflows/issues-to-csv.yml +++ b/.github/workflows/issues-to-csv.yml @@ -11,7 +11,7 @@ jobs: # unless overridden when issue has "LA website bot" in title, then 'Questions / In Review' list-issues: runs-on: ubuntu-latest - if: ${{ github.event_name == 'issues' && github.event.action == 'opened' }} + # if: ${{ github.event_name == 'issues' && github.event.action == 'opened' }} # env: # COLUMN_NAME: ${{ contains(github.event.issue.title, 'Hack for LA website bot') && 'Questions / In Review' || 'New Issue Approval' }} steps: From 0fca665df8accb8333c7d3989168029e295f8ea0 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 12 Feb 2024 19:58:13 -0800 Subject: [PATCH 06/49] Debug uses statement.yml --- .github/workflows/issues-to-csv.yml | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml index 60cf090..8d445c8 100644 --- a/.github/workflows/issues-to-csv.yml +++ b/.github/workflows/issues-to-csv.yml @@ -17,7 +17,7 @@ jobs: steps: - name: List Issues for Repository # id: list_issues - uses: alex-page/github-project-automation-plus@v0.9.0 + # uses: alex-page/github-project-automation-plus@v0.9.0 with: project: Volunteer Dashboard column: ${{ env.COLUMN_NAME }} From cd705b91466999c760892d716cd06cfc3d37078e Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 26 Feb 2024 09:59:17 -0800 Subject: [PATCH 07/49] Simplified workflow --- .github/workflows/issues-to-csv.yml | 77 ++++++++++++++++++++--------- 1 file changed, 54 insertions(+), 23 deletions(-) diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml index 8d445c8..01aaf9a 100644 --- a/.github/workflows/issues-to-csv.yml +++ b/.github/workflows/issues-to-csv.yml @@ -1,47 +1,78 @@ name: List Issues and Output as CSV on: + issues: + types: [opened, closed, reopened] pull_request: - issues: - type: [opened] - + types: [opened, closed, reopened] jobs: - # Adds newly created issues onto project board in the default column 'New Issue Approval' - # unless overridden when issue has "LA website bot" in title, then 'Questions / In Review' list-issues: runs-on: ubuntu-latest - # if: ${{ github.event_name == 'issues' && github.event.action == 'opened' }} - # env: - # COLUMN_NAME: ${{ contains(github.event.issue.title, 'Hack for LA website bot') && 'Questions / In Review' || 'New Issue Approval' }} steps: - - name: List Issues for Repository - # id: list_issues - # uses: alex-page/github-project-automation-plus@v0.9.0 - with: - project: Volunteer Dashboard - column: ${{ env.COLUMN_NAME }} - repo-token: ${{ secrets.HACKFORLA_BOT_PA_TOKEN }} - - # Checkout the repo to access the generate_csv.py script + # Checkout the repo to access any scripts or tools you might need - name: Checkout repository uses: actions/checkout@v3 - + + # Generate issues CSV - name: Generate issues CSV run: | - echo "Issue Number,Title,Labels,Projects" > issues.csv + echo "Issue Number,Title,Labels" > issues.csv curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ "https://api.github.com/repos/${{ github.repository }}/issues?state=all&per_page=100" | \ - jq -r '.[] | select(.pull_request == null) | [.number, .title, (.labels | map(.name) | join(",")), (.projectCards | map(.project_url) | join(","))] | @csv' >> issues.csv - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + jq -r '.[] | select(.pull_request == null) | [.number, .title, (.labels | map(.name) | join(","))] | @csv' >> issues.csv + # Upload CSV as artifact - name: Upload CSV as artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v2 with: name: issues-csv path: issues.csv +# name: List Issues and Output as CSV + +# on: +# pull_request: +# issues: +# type: [opened] + + +# jobs: +# # Adds newly created issues onto project board in the default column 'New Issue Approval' +# # unless overridden when issue has "LA website bot" in title, then 'Questions / In Review' +# list-issues: +# runs-on: ubuntu-latest +# # if: ${{ github.event_name == 'issues' && github.event.action == 'opened' }} +# # env: +# # COLUMN_NAME: ${{ contains(github.event.issue.title, 'Hack for LA website bot') && 'Questions / In Review' || 'New Issue Approval' }} +# steps: +# - name: List Issues for Repository +# # id: 
list_issues +# # uses: alex-page/github-project-automation-plus@v0.9.0 +# with: +# project: Volunteer Dashboard +# column: ${{ env.COLUMN_NAME }} +# repo-token: ${{ secrets.HACKFORLA_BOT_PA_TOKEN }} + +# # Checkout the repo to access the generate_csv.py script +# - name: Checkout repository +# uses: actions/checkout@v3 + + # - name: Generate issues CSV + # run: | + # echo "Issue Number,Title,Labels,Projects" > issues.csv + # curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ + # "https://api.github.com/repos/${{ github.repository }}/issues?state=all&per_page=100" | \ + # jq -r '.[] | select(.pull_request == null) | [.number, .title, (.labels | map(.name) | join(",")), (.projectCards | map(.project_url) | join(","))] | @csv' >> issues.csv + # env: + # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # - name: Upload CSV as artifact + # uses: actions/upload-artifact@v3 + # with: + # name: issues-csv + # path: issues.csv + # # Setup Python environment # - name: Set up Python # uses: actions/setup-python@v4 From 4e2936aff2931e09b04542eed35c94bd174b13d4 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 26 Feb 2024 10:07:23 -0800 Subject: [PATCH 08/49] add new manual trigger --- .github/workflows/issues-to-csv.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml index 01aaf9a..7ff5f65 100644 --- a/.github/workflows/issues-to-csv.yml +++ b/.github/workflows/issues-to-csv.yml @@ -5,7 +5,8 @@ on: types: [opened, closed, reopened] pull_request: types: [opened, closed, reopened] - + workflow_dispatch: + jobs: list-issues: runs-on: ubuntu-latest From a81859d6b14100af040127dd61e17de221db1259 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 26 Feb 2024 10:44:57 -0800 Subject: [PATCH 09/49] add scheduler for testing --- .github/workflows/issues-to-csv.yml | 76 +++++------------------------ 1 file changed, 11 insertions(+), 65 deletions(-) diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml index 7ff5f65..21b1b39 100644 --- a/.github/workflows/issues-to-csv.yml +++ b/.github/workflows/issues-to-csv.yml @@ -1,11 +1,14 @@ name: List Issues and Output as CSV on: + workflow_dispatch: + schedule: + - cron: '*/10 * * * *' issues: types: [opened, closed, reopened] pull_request: types: [opened, closed, reopened] - workflow_dispatch: + jobs: list-issues: @@ -22,6 +25,13 @@ jobs: curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ "https://api.github.com/repos/${{ github.repository }}/issues?state=all&per_page=100" | \ jq -r '.[] | select(.pull_request == null) | [.number, .title, (.labels | map(.name) | join(","))] | @csv' >> issues.csv + + # - name: Commit CSV to Repository + # uses: stefanzweifel/git-auto-commit-action@v4 + # with: + # commit_message: "Update issues CSV" + # file_pattern: issues.csv + # branch: main # Upload CSV as artifact - name: Upload CSV as artifact @@ -30,70 +40,6 @@ jobs: name: issues-csv path: issues.csv -# name: List Issues and Output as CSV -# on: -# pull_request: -# issues: -# type: [opened] - - -# jobs: -# # Adds newly created issues onto project board in the default column 'New Issue Approval' -# # unless overridden when issue has "LA website bot" in title, then 'Questions / In Review' -# list-issues: -# runs-on: ubuntu-latest -# # if: ${{ github.event_name == 'issues' && github.event.action == 'opened' }} -# # env: -# # COLUMN_NAME: ${{ 
contains(github.event.issue.title, 'Hack for LA website bot') && 'Questions / In Review' || 'New Issue Approval' }} -# steps: -# - name: List Issues for Repository -# # id: list_issues -# # uses: alex-page/github-project-automation-plus@v0.9.0 -# with: -# project: Volunteer Dashboard -# column: ${{ env.COLUMN_NAME }} -# repo-token: ${{ secrets.HACKFORLA_BOT_PA_TOKEN }} - -# # Checkout the repo to access the generate_csv.py script -# - name: Checkout repository -# uses: actions/checkout@v3 - - # - name: Generate issues CSV - # run: | - # echo "Issue Number,Title,Labels,Projects" > issues.csv - # curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ - # "https://api.github.com/repos/${{ github.repository }}/issues?state=all&per_page=100" | \ - # jq -r '.[] | select(.pull_request == null) | [.number, .title, (.labels | map(.name) | join(",")), (.projectCards | map(.project_url) | join(","))] | @csv' >> issues.csv - # env: - # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - # - name: Upload CSV as artifact - # uses: actions/upload-artifact@v3 - # with: - # name: issues-csv - # path: issues.csv - - # # Setup Python environment - # - name: Set up Python - # uses: actions/setup-python@v4 - # with: - # python-version: '3.8' - - # # Install Python dependencies (if any) - # - name: Install dependencies - # run: pip install -r requirements.txt - - # # Run the script to generate the CSV file - # - name: Generate CSV of issues - # run: python generate_csv.py - # env: - # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # OUTPUT_FILENAME: issues.csv - - # # Upload issues.csv as an artifact - # - name: Upload CSV as artifact - # uses: actions/upload-artifact@v3 - # with: # name: issues-csv # path: issues.csv From e51f2c5a61868b2c47a1331519260b84007442cd Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 26 Feb 2024 18:28:34 -0800 Subject: [PATCH 10/49] update script to bypass gitignore and add file to repo on branch --- .github/workflows/issues-to-csv.yml | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml index 21b1b39..6752cfe 100644 --- a/.github/workflows/issues-to-csv.yml +++ b/.github/workflows/issues-to-csv.yml @@ -25,20 +25,23 @@ jobs: curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ "https://api.github.com/repos/${{ github.repository }}/issues?state=all&per_page=100" | \ jq -r '.[] | select(.pull_request == null) | [.number, .title, (.labels | map(.name) | join(","))] | @csv' >> issues.csv + + #force add file to repo, commit and push + - name: Commit and push + run: | + git config user.name "Automated" + git config user.email "actions@users.noreply.github.com" + git add -f issues.csv + timestamp=$(date -u) + git commit -m "Latest data: ${timestamp}" || exit 0 + git push origin HEAD:refs/heads/n2020h-issues-to-csv - # - name: Commit CSV to Repository - # uses: stefanzweifel/git-auto-commit-action@v4 + # # Upload CSV as artifact + # - name: Upload CSV as artifact + # uses: actions/upload-artifact@v2 # with: - # commit_message: "Update issues CSV" - # file_pattern: issues.csv - # branch: main - - # Upload CSV as artifact - - name: Upload CSV as artifact - uses: actions/upload-artifact@v2 - with: - name: issues-csv - path: issues.csv + # name: issues-csv + # path: issues.csv # name: issues-csv From a41128ad57113796168594c069da519d83fed0f2 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 26 
Feb 2024 18:44:39 -0800 Subject: [PATCH 11/49] force push to feature branch --- .github/workflows/issues-to-csv.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml index 6752cfe..8384049 100644 --- a/.github/workflows/issues-to-csv.yml +++ b/.github/workflows/issues-to-csv.yml @@ -34,7 +34,7 @@ jobs: git add -f issues.csv timestamp=$(date -u) git commit -m "Latest data: ${timestamp}" || exit 0 - git push origin HEAD:refs/heads/n2020h-issues-to-csv + git push --force origin HEAD:refs/heads/n2020h-issues-to-csv # # Upload CSV as artifact # - name: Upload CSV as artifact From 0963a4e7ca4f6a33225519b76b95e16c1cee6cd9 Mon Sep 17 00:00:00 2001 From: Automated Date: Tue, 27 Feb 2024 02:45:13 +0000 Subject: [PATCH 12/49] Latest data: Tue Feb 27 02:45:12 UTC 2024 --- issues.csv | 46 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 issues.csv diff --git a/issues.csv b/issues.csv new file mode 100644 index 0000000..4120832 --- /dev/null +++ b/issues.csv @@ -0,0 +1,46 @@ +Issue Number,Title,Labels +194,"Create data dictionary (EDA task)","" +193,"DRAFT: Access to ""Third Spaces""","" +191,"DRAFT: MediaWiki NLP Project","" +190,"Create Base64 and Env File Tutorial","" +189,"Refining the Requirements for Skills Match Dashboard","" +183,"MERL Center Data Cleaning","role: missing,project duration: one time,size: 1pt,project: MC Southern Africa NGOs' OS usage" +182,"Active and Inactive Businesses of LA County","role: data science,epic,size: 3pt,project: EDA" +181,"Affordable Housing Access to Food Pantries","role: data science,epic,size: 3pt,project: EDA" +180,"City of Los Angeles Arrests","role: data science,epic,size: 3pt,project: EDA" +179,"City of Los Angeles Evictions","role: data science,epic,size: 3pt,project: EDA" +178,"Find data sources through the LA Controller's office","documentation,good first issue,role: data analysis,size: 1pt,project: missing" +177,"Create 311 data CSV files that can be accessed through a Jupyter notebook","role: data science,project: 311 Data,size: 3pt,feature: project management" +170,"California Grants Portal Data Set EDA","role: data analysis,project duration: ongoing,size: 1pt,project: EDA" +169,"Potential Projects: California Grants Portal","role: data science,epic,size: epic,dependency,project: EDA" +163,"Complete Intro to Command Line and Git Tutorial","feature: guide,role: data science,size: 1pt" +162,"Complete Intro to Python Tutorial","feature: guide,role: data science,size: 1pt,draft,ready for product" +161,"Project Name: Lucky Parking","feature: missing,role: data analysis,size: 1pt,project: missing" +157,"Create Deep Learning Tutorial","documentation,feature: guide,role: org,size: 1pt" +156,"Create Machine Learning Tutorial","documentation,feature: guide,role: org,size: 1pt" +155,"Create Stats Tutorial","documentation,feature: guide,role: org,size: 1pt" +154,"Create Data Ops Tutorial","documentation,feature: guide,role: org,size: 1pt" +153,"Create Text Analysis Tutorial","documentation,feature: guide,role: org,size: 1pt" +152,"Open Source Projects Data Set for Climate projects","role: data analysis,size: 1pt,epic: GreenEarthOS,project: climate projects,draft" +149,"Weekly Label Check","role: product,size: 1pt,feature: project management" +160,"Survey: Repo Labels","project duration: ongoing,size: 2pt,time sensitive,role: Data Engineer,feature: labels" +148,"Create Geospatial Data Analysis 
Tutorial","documentation,feature: guide,role: org,size: 1pt" +147,"Create Data Engineering Tutorial","documentation,feature: guide,role: org,size: 1pt" +146,"Create Web Scraping Tutorial","documentation,feature: guide,role: org,size: 1pt" +145,"Create Data Analysis With R Tutorial","documentation,feature: guide,role: org,size: 1pt" +144,"Create Data Visualization Tutorial","documentation,feature: guide,role: org,size: 1pt,feature: needs peer review" +143,"Create ETL/Data Cleaning Tutorial","documentation,feature: guide,role: org,size: 1pt" +141,"Obtain Shape Files for Different District Types (2023)","good first issue,role: data analysis,project duration: one time,dependency,size: 1pt,project: district types" +140,"Create a logo for the Data Science CoP","role: product,role: org,project duration: one time,size: 2pt,feature: branding" +138,"Starter Project for New Data Analysts and Data Scientists","role: product,role: CoP lead,feature: onboarding,size: epic,dependency" +137,"Data Science CoP Meeting Agendas (Monday 7PM PST)","feature: guide,role: product,size: 1pt,feature: agenda" +135,"Find and document all the 311 public data sets","role: data analysis,project: 311 Data,size: 2pt,epic: 311 public data sets" +134,"Overview Dashboard - add titles to graphs","feature: guide,role: data science,project: 311-data-dashboards,size: 1pt,epic: 311 public data sets" +133,"Create labels, features, milestones","role: product,dependency,size: 3pt,feature: project management" +131,"Story Size for Data Science Issues","role: product,size: 1pt,feature: project management" +130,"Create a Guide: Web Scraping","Guide: Research,feature: guide,role: org,size: 5pt" +124,"Obtain Shape Files for Different District Types as of Nov/Dec 2021","feature: guide,role: data analysis,project duration: one time,size: 1pt,project: district types" +121,"Template For Creating Epics For Data Science Projects","role: product,size: 1pt,feature: project management" +120,"Structured Context for HfLA created data sets","feature: guide,role: data science,size: epic,project: structured context,TG: Draft Template" +118,"Create district types reusable tool (API, single dataset, etc.)","feature: guide,role: data analysis,epic,size: epic,size: 1pt" +94,"Data Science Competitive/Comparative Analysis","feature: guide,role: product,size: 2pt" From f4914757c63fd3e30e77d95856501d33948bc9cf Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 22 Jul 2024 16:35:43 -0400 Subject: [PATCH 13/49] Update issues-to-csv to add more fields.yml --- .github/workflows/issues-to-csv.yml | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml index 8384049..f216da5 100644 --- a/.github/workflows/issues-to-csv.yml +++ b/.github/workflows/issues-to-csv.yml @@ -18,14 +18,21 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 + # Generate issues CSV + # - name: Generate issues CSV + # run: | + # echo "Issue Number,Title,Labels" > issues.csv + # curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ + # "https://api.github.com/repos/${{ github.repository }}/issues?state=all&per_page=100" | \ + # jq -r '.[] | select(.pull_request == null) | [.number, .title, (.labels | map(.name) | join(","))] | @csv' >> issues.csv + # Generate issues CSV - name: Generate issues CSV run: | - echo "Issue Number,Title,Labels" > issues.csv + echo "Issue Number,Title,Labels,State,Assignee,Milestone,Comments,Created At,Updated 
At,Closed At,Author Association,Body" > issues.csv curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ "https://api.github.com/repos/${{ github.repository }}/issues?state=all&per_page=100" | \ - jq -r '.[] | select(.pull_request == null) | [.number, .title, (.labels | map(.name) | join(","))] | @csv' >> issues.csv - + jq -r '.[] | select(.pull_request == null) | [.number, .title, (.labels | map(.name) | join(",")), .state, (.assignee | if . != null then .login else "None" end), (.milestone | if . != null then .title else "None" end), .comments, .created_at, .updated_at, (.closed_at // "None"), .author_association, .body] | @csv' >> issues.csv #force add file to repo, commit and push - name: Commit and push run: | From 36f4b152aa40854fdee1002df26696ceb0009282 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 22 Jul 2024 16:48:55 -0400 Subject: [PATCH 14/49] Update issues-to-csv.yml Runs on push and pull request: branches -feature branch --- .github/workflows/issues-to-csv.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml index f216da5..673fae8 100644 --- a/.github/workflows/issues-to-csv.yml +++ b/.github/workflows/issues-to-csv.yml @@ -1,13 +1,18 @@ name: List Issues and Output as CSV on: + push: + branches: + - n2020h-issues-to-csv workflow_dispatch: schedule: - - cron: '*/10 * * * *' + - cron: '*/5 * * * *' issues: types: [opened, closed, reopened] pull_request: types: [opened, closed, reopened] + branches: + - n2020h-issues-to-csv jobs: From c62826baa62b581c90cd5b0eaedc624ee84020b2 Mon Sep 17 00:00:00 2001 From: Automated Date: Mon, 22 Jul 2024 20:49:10 +0000 Subject: [PATCH 15/49] Latest data: Mon Jul 22 20:49:10 UTC 2024 --- issues.csv | 1178 ++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 1132 insertions(+), 46 deletions(-) diff --git a/issues.csv b/issues.csv index 4120832..4e20a58 100644 --- a/issues.csv +++ b/issues.csv @@ -1,46 +1,1132 @@ -Issue Number,Title,Labels -194,"Create data dictionary (EDA task)","" -193,"DRAFT: Access to ""Third Spaces""","" -191,"DRAFT: MediaWiki NLP Project","" -190,"Create Base64 and Env File Tutorial","" -189,"Refining the Requirements for Skills Match Dashboard","" -183,"MERL Center Data Cleaning","role: missing,project duration: one time,size: 1pt,project: MC Southern Africa NGOs' OS usage" -182,"Active and Inactive Businesses of LA County","role: data science,epic,size: 3pt,project: EDA" -181,"Affordable Housing Access to Food Pantries","role: data science,epic,size: 3pt,project: EDA" -180,"City of Los Angeles Arrests","role: data science,epic,size: 3pt,project: EDA" -179,"City of Los Angeles Evictions","role: data science,epic,size: 3pt,project: EDA" -178,"Find data sources through the LA Controller's office","documentation,good first issue,role: data analysis,size: 1pt,project: missing" -177,"Create 311 data CSV files that can be accessed through a Jupyter notebook","role: data science,project: 311 Data,size: 3pt,feature: project management" -170,"California Grants Portal Data Set EDA","role: data analysis,project duration: ongoing,size: 1pt,project: EDA" -169,"Potential Projects: California Grants Portal","role: data science,epic,size: epic,dependency,project: EDA" -163,"Complete Intro to Command Line and Git Tutorial","feature: guide,role: data science,size: 1pt" -162,"Complete Intro to Python Tutorial","feature: guide,role: data science,size: 1pt,draft,ready for product" 
-161,"Project Name: Lucky Parking","feature: missing,role: data analysis,size: 1pt,project: missing" -157,"Create Deep Learning Tutorial","documentation,feature: guide,role: org,size: 1pt" -156,"Create Machine Learning Tutorial","documentation,feature: guide,role: org,size: 1pt" -155,"Create Stats Tutorial","documentation,feature: guide,role: org,size: 1pt" -154,"Create Data Ops Tutorial","documentation,feature: guide,role: org,size: 1pt" -153,"Create Text Analysis Tutorial","documentation,feature: guide,role: org,size: 1pt" -152,"Open Source Projects Data Set for Climate projects","role: data analysis,size: 1pt,epic: GreenEarthOS,project: climate projects,draft" -149,"Weekly Label Check","role: product,size: 1pt,feature: project management" -160,"Survey: Repo Labels","project duration: ongoing,size: 2pt,time sensitive,role: Data Engineer,feature: labels" -148,"Create Geospatial Data Analysis Tutorial","documentation,feature: guide,role: org,size: 1pt" -147,"Create Data Engineering Tutorial","documentation,feature: guide,role: org,size: 1pt" -146,"Create Web Scraping Tutorial","documentation,feature: guide,role: org,size: 1pt" -145,"Create Data Analysis With R Tutorial","documentation,feature: guide,role: org,size: 1pt" -144,"Create Data Visualization Tutorial","documentation,feature: guide,role: org,size: 1pt,feature: needs peer review" -143,"Create ETL/Data Cleaning Tutorial","documentation,feature: guide,role: org,size: 1pt" -141,"Obtain Shape Files for Different District Types (2023)","good first issue,role: data analysis,project duration: one time,dependency,size: 1pt,project: district types" -140,"Create a logo for the Data Science CoP","role: product,role: org,project duration: one time,size: 2pt,feature: branding" -138,"Starter Project for New Data Analysts and Data Scientists","role: product,role: CoP lead,feature: onboarding,size: epic,dependency" -137,"Data Science CoP Meeting Agendas (Monday 7PM PST)","feature: guide,role: product,size: 1pt,feature: agenda" -135,"Find and document all the 311 public data sets","role: data analysis,project: 311 Data,size: 2pt,epic: 311 public data sets" -134,"Overview Dashboard - add titles to graphs","feature: guide,role: data science,project: 311-data-dashboards,size: 1pt,epic: 311 public data sets" -133,"Create labels, features, milestones","role: product,dependency,size: 3pt,feature: project management" -131,"Story Size for Data Science Issues","role: product,size: 1pt,feature: project management" -130,"Create a Guide: Web Scraping","Guide: Research,feature: guide,role: org,size: 5pt" -124,"Obtain Shape Files for Different District Types as of Nov/Dec 2021","feature: guide,role: data analysis,project duration: one time,size: 1pt,project: district types" -121,"Template For Creating Epics For Data Science Projects","role: product,size: 1pt,feature: project management" -120,"Structured Context for HfLA created data sets","feature: guide,role: data science,size: epic,project: structured context,TG: Draft Template" -118,"Create district types reusable tool (API, single dataset, etc.)","feature: guide,role: data analysis,epic,size: epic,size: 1pt" -94,"Data Science Competitive/Comparative Analysis","feature: guide,role: product,size: 2pt" +Issue Number,Title,Labels,State,Assignee,Milestone,Comments,Created At,Updated At,Closed At,Author Association,Body +204,"Recruit volunteers for team open roles","role: missing,epic,ready for product,size: 0.25pt,feature: 
recruiting","open","None","None",2,"2024-06-14T19:15:48Z","2024-06-22T03:14:32Z","None","CONTRIBUTOR","### Dependency +- [ ] need to recruit new team members to Data Science projects team + +### Overview +We need volunteer(s) to work on the open issues we have so that we can move the project forward. + +### Action Items +#### Creating a new role post +- [ ] Copy the template post from the Community of Practice WIKI, Open Roles Templates (Resource 1.01) +- [ ] Paste the template in a comment below +- [ ] Customize the comment to reflect your open role +- [ ] Open a new issue by using the Open Role template (Resource 1.02) +- [ ] Copy and Paste your customized comment to the new issue +- [ ] Add the label for the role that you are recruiting for +- [ ] Submit the new issue +- [ ] Assign yourself to the new issue +- [ ] Add the issue to the HfLA: Open Roles project board (Resource 1.03) +- [ ] Add the link of the new issue to the related comment on this issue +- [ ] Keep yourself assigned to this issue until the role is filled + +#### Marketing the new role post +- Post a link to the new issue in the Slack channel for the related Practice Area + - [ ] Copy the Slack message (Resource 1.04) + - [ ] Copy the message into the Slack Channel for the relevant Community of Practice (The Slack channels can be found on Resource 1.01) + - [ ] Replace the text `[Replace with LINK TO NEW ISSUE]` with link to the issue + - [ ] Check your team Slack channel for people reaching out after every onboarding and at least every other day + +#### Once Filled +- [ ] Hide the comment on this issue for the role you just filled +- [ ] Add the following comment to the role posting issue + ``` + This role has been filled by @[Replace with GITHUB HANDLE OF PERSON WHO IS TAKING ROLE] + ``` +- [ ] Close the role posting issue +- [ ] Unassign yourself from this issue +- If there are no open roles on this issue (i.e., if there are open roles, there will be unclosed comments) + - [ ] Add the label: Dependency and uncheck the dependency on top of this issue + - [ ] Move this issue to the icebox, on the project board + +### Resources/Instructions +- 1.01 [Community of Practice WIKI, Open Roles Templates, Secondary Issue](https://github.com/hackforla/communities-of-practice/wiki/Open-Roles-Templates#secondary-issue) +- 1.02 [Open Role Template: see Secondary Issue Draft](https://github.com/hackforla/communities-of-practice/wiki/Open-Roles-Templates) +- 1.03 [HfLA: Open Roles board](https://github.com/orgs/hackforla/projects/67) +- 1.04 Copy to post in CoP Slack Channels when there is an open role + ``` + Hi, the Data Science CoP has a volunteer posting, which includes details of how to get in touch with us if you are interested. [Replace with LINK TO NEW ISSUE] + ```" +203,"Prep project boards for Migration","role: product","open","akhaleghi","None",36,"2024-06-10T17:44:29Z","2024-06-24T20:58:57Z","None","CONTRIBUTOR","### Overview +We need to prepare the Project Boards for the forced migration that GitHub is doing in August, so that it is already working well for the team by then. + +### Action Items +- [x] copy this template to a comment below + ```` + ### Text from project board card + ``` + [INSERT MARKDOWN HERE] + ``` + - [ ] Determine where this item goes on the wiki (page and section) + - [ ] copy item to wiki + - [ ] hide this comment when completed. 
+ ```` +- [ ] copy markdown from a card in the ""Start Here"" column of the Project Board into your new comment where it says `[INSERT MARKDOWN HERE] + +#### All issues are on the project board + - [ ] Check to see if all issues are on the project board (resource 1.02) + - [ ] If not, add them to the board, and put them in the correct columns + - [ ] Categorize (can be done later) + +### Resources/Instructions +Project board URLs: +[New board](https://github.com/orgs/hackforla/projects/65) +[Old board](https://github.com/hackforla/data-science/projects/1) +" +202,"Information for New and Existing Members","CoP: Data Science","open","None","None",0,"2024-06-10T17:31:57Z","2024-06-24T21:01:49Z","None","CONTRIBUTOR","**Meeting Times** +Every Monday at 7:00PM Pacific. No meetings on the first Monday of the month. +[Zoom Link](https://us06web.zoom.us/j/87038212377?pwd=bnh3bkJGeWJ0eS94YTRzaVFhZ2Nwdz09) + +**Important Links** +[Google Drive](https://drive.google.com/drive/folders/1maraSDu5BXzCYWFubY834Nq-8AT93FUJ?usp=sharing) +[Data Science Team Roster](https://docs.google.com/spreadsheets/d/1QJltNh1gOybfebe-RkT-xS7m4OtxbuFfaJ4OujeA4h0/edit#gid=0) +Please contact Abe Khaleghi on Slack if you require access to the above sites. + +**For Letters Confirming Hack for LA Participation** +Please see: https://github.com/hackforla/governance/issues/61 + +**Handy Tools and Sites** +[Full Page Screenshot Chrome ext](https://chrome.google.com/webstore/detail/gofullpage-full-page-scre/fdpohaocaechififmbbbbbknoalclacl?hl=en) +[311 Data API Access](https://dev-api.311-data.org/docs) +" +200,"EPA Data Set","","open","salice","None",2,"2024-04-30T02:37:50Z","2024-07-01T17:04:25Z","None","CONTRIBUTOR","### Overview +REPLACE THIS TEXT -Text here that clearly states the purpose of this issue in 2 sentences or less. + +### Action Items +### Action Items +- [ ] Add all data sources to Resources section below + - [ ] EDA Tasks + - [ ] Combine data from years into one data set and see differences + - [ ] Data Dictionary + - [ ] Data Cleaning +- [ ] Write one-sheet + - [ ] Define stakeholder (Access the data and 311 teams used for educational puposes) + - [ ] Summarize project including value add + - [ ] Define project 6 month roadmap + - [ ] Detail history (if any) +- [ ] Define tools to be used to visualize combined data + +Additional tasks TBD + +### Resources/Instructions +- [Data source](https://www.epa.gov/outdoor-air-quality-data/download-daily-data) +- Is there a link to an API to access the data? 
+" +198,"Update the About us page on wiki","role: product,feature: onboarding,project duration: one time,ready for product,project: Data Science CoP Maintenance,size: 0.25pt","open","max1million101","time sensitive",0,"2024-03-29T19:21:52Z","2024-07-04T20:25:37Z","None","MEMBER","### Overview +We need to have contact information and overview of who is running the Data Science Community of Practice + +### Action Items +- [x] Change ""Mentors"" to be Current Mentors +- [ ] Move Ryan Swan to a section of page, with the following header + ``` + ### Former Mentors + ``` +- [ ] Update product manager to list Abe's name and bio +- [ ] Update product manager section to add Tania Khan to the page + +### Resources/Instructions +https://github.com/hackforla/data-science/wiki/About-us" +194,"Create data dictionary (EDA task)","feature: missing,role: missing,size: missing,project: missing","open","None","None",1,"2024-02-18T06:36:30Z","2024-03-29T19:35:15Z","None","MEMBER", +193,"DRAFT: Access to ""Third Spaces""","feature: missing,role: missing,size: missing,project: missing","open","None","None",0,"2024-02-12T22:46:40Z","2024-03-29T19:35:14Z","None","CONTRIBUTOR","### Overview +Project idea from Karina to examine accessibility to green spaces (parks, public beaches, etc.) within the City of LA. Ideas include: + - Combining metro/crime (e..g, vehicle crash data) data sets to see how accessible (transportation-wise) third green spaces are for the broad population. + - Examine population density info get a sense of the size of parks x population it’s serving in a mile-radius. Analysis can be helpful in many (deciding where to hold public events, determine underserved areas for new green spaces, etc) + +### Data Sources Starting Point +Some data sources to start exploration can be found here: +- [LA Metro dataset](https://developer.metro.net/gis-data/) +- [LA City Parks](https://www.google.com/url?sa=t&rct=j&q=&esrc=s&source=web&cd=&ved=2ahUKEwik1LT4oKeEAxUtMUQIHeT5DJMQFnoECBUQAQ&url=https%3A%2F%2Fdata.lacity.org%2FParks-Recreation%2FDepartment-of-Recreation-and-Parks-GIS-Map-of-Park%2Fnuub-r4zx&usg=AOvVaw0ixMbMAc7m57sctXQOpKht&opi=89978449) +- [LA census data ](https://data.census.gov/table?g=050XX00US06037) + +_Additional Data sources may be used for this project._ + +### Next steps +- [ ] Decide if this should be one issue or multiple. +- [ ] Decide on what additional analyses we can do +- [ ] Complete Project Proposal outlining: Objective, Expected Deliverable(s), Data Sources & Limitations, Research outline. Ideally, proposal should outline possible stakeholder(s) (e.g, LA resident, city council) of project deliverables. + +### Project may involve: +Python, GIS data, data analytics, dashboard-building, data modeling + + +" +191,"DRAFT: MediaWiki NLP Project","feature: missing,role: missing,size: missing,project: missing","open","salice","None",1,"2024-01-09T03:29:17Z","2024-04-12T19:24:57Z","None","CONTRIBUTOR","### Overview +We want to generate ideas to prepare this issue for our CoP + +### Action Items +- [ ] Review MediaWiki API site [here](https://www.mediawiki.org/wiki/API:Main_page) + +### Resources/Instructions + +" +190,"Create Base64 and Env File Tutorial","feature: missing,role: missing,size: missing,project: missing","open","None","None",0,"2023-12-01T16:26:17Z","2024-03-29T19:35:13Z","None","MEMBER","### Overview + +Write a tutorial on base64 and usage of .env files based on experience using them when completing automation dashboard project for Hack for LA website team. 
This would be useful for a data scientist/ analyst who needs to upload their Python script to a GitHub repository without compromising the security of confidential API keys and other (privacy) information. + +### Action Items +- [x] Create an introduction that gives context as to the purpose/use of the tutorial and connects the 2 concepts based on commonality +- [x] Create full walkthroughs of process to use both +- [x] Search for useful resources to complement tutorial +- [x] Add screenshots and resize them to appropriate sizes +- [x] Proofread for misleading language +- [x] Message Sophia and Karina to get tutorial approved + +### Resources/Instructions +All resources have been included in the body of the tutorial + +Wiki Tutorial Created: [Introduction to Keeping Confidential Information Safe on GitHub: GitHub secrets and .env files](https://github.com/hackforla/data-science/wiki/Introduction-to-Keeping-Confidential-Information-Safe-on-GitHub:-GitHub-secrets-and-.env-files) + +" +189,"Refining the Requirements for Skills Match Dashboard","role: missing,size: missing,project: missing,feature: skills / productivity dashboard","open","n2020h","None",6,"2023-11-21T03:52:00Z","2024-04-15T21:06:37Z","None","MEMBER","### Overview +We need to gather previous relevant documentation of features and functions skills dashboard and refine requirements, so that we can develop it. + +### Action Items + +- [ ] Bonnie will add links to prior known issues from other projects +- [ ] + +### Resources/Instructions +https://docs.google.com/spreadsheets/d/1SV0j3NKcjOpHuT3okyM36xwWA966hNA4N1zmqRtd8B8/edit#gid=11574199 +" +183,"MERL Center Data Cleaning","role: missing,project duration: one time,size: 1pt,project: MC Southern Africa NGOs' OS usage","closed","None","None",8,"2023-10-11T18:00:56Z","2024-01-16T20:39:32Z","2024-01-16T20:39:25Z","MEMBER","### Overview +We need data collected through KoboToolbox to be analyzed and then visualized so that the findings can be incorporated in an article for the MERL Center +### Action Items +- [ ] Clean data +- [ ] Provide analysis +- [ ] Create visualization + +### Resources/Instructions +- [MERL Center Folder on Data Science CoP Drive](https://drive.google.com/drive/folders/1gXWyLtwfl697hCVMS-D1tC0djPf3woV5?usp=drive_link) +- [Survey Data](https://docs.google.com/spreadsheets/d/15zLn9NBXkadAsA8SjuXwGHjGaaJJQUcy05V9Ksm0oh0/edit?usp=drive_link) + - First Tab: Survey data with PII removed + - Second Tab: Bonnie's demonstration for data science community (wrapped column headers, highlighted related columns with colors, starting at GN through HG) + - Please note: column K has duplicate numbers to indicate multiple people from the same organization (meaningful because in column L, different people from the same organization indicated different category) +- [Survey Form](https://ee.kobotoolbox.org/x/and3RucH)" +182,"CoP: Data Science: Active and Inactive Businesses of LA County","role: data science,epic,size: 3pt,project: EDA","open","rahul897","None",22,"2023-08-30T18:11:02Z","2024-06-25T02:13:03Z","None","CONTRIBUTOR","### Prerequisite(s) +If you would like to work on this issue, please add a comment below and include the following information: +- Your name +- How many hours you can commit to working on this in the next week (minimum of 2) +- Commit to providing an update with a comment before the next community of practice meeting + +For example: +- John Doe +- I can commit to working on this issue 3 hours in the following week. 
+- Yes, I will provide an update on my progress with a comment below. + +Once you have done this, please add yourself to the “Assignees” section on the right and update the issue weekly to document your progress. + +### Overview +We want to create a usable dataset of active and inactive businesses to perform various time series analyses (i.e. visualizing business closures during the covid pandemic). + +### Action Items +Phase 1 +- [ ] Find available data sources and add to Resources section +- [ ] Create data dictionary (EDA task) +- [ ] Create issues required to fulfill project requirements, including exploratory data analysis, required tasks, and deliverables + - [ ] Perform data cleaning (EDA task) + - [ ] Understand and outline data context +- [ ] Write one-sheet (see Resources below) + - [ ] Define stakeholder + - [ ] Summarize project, including value add + - [ ] Define project 6 month roadmap + - [ ] Detail history (if any) + +### Resources/Instructions +[Data source](https://data.lacity.org/Administration-Finance/Listing-of-All-Businesses/r4uk-afju) for business listings in LA County. +" +181,"CoP: Data Science: Affordable Housing Access to Food Pantries","role: data science,epic,size: 3pt,project: EDA","closed","MDivyaPrakash","None",10,"2023-08-30T18:00:11Z","2024-06-26T07:08:36Z","2024-06-26T07:08:36Z","CONTRIBUTOR","### Prerequisite(s) +If you would like to work on this issue, please add a comment below and include the following information: +- Your name +- How many hours you can commit to working on this in the next week (minimum of 2) +- Commit to providing an update with a comment before the next community of practice meeting + +For example: +- John Doe +- I can commit to working on this issue 3 hours in the following week. +- Yes, I will provide an update on my progress with a comment below. + +Once you have done this, please add yourself to the “Assignees” section on the right and update the issue weekly to document your progress. + +### Overview +We want to analyze to what extent affordable housing residents have access to food pantries and meal programs. To do this, we want to examine the geographic location of these housing projects in relation to their accessibility to pantries and programs. + +### Action Items +Phase 1 +- [x] Find available data sources and add to Resources section, particularly the Food Oasis data. +- [x] Determine is this is one-time or ongoing project (and assign appropriate label) +- [x] Write one-sheet (see Resources below) + - [x] Define stakeholder + - [x] Summarize project, including value add + - [x] Define project 6 month roadmap + - [x] Detail history (if any) +- [x] Define tools to be used for analysis and visualization (if applicable) +- [x] Create issues required to fulfill project requirements, including exploratory data analysis, required tasks, and deliverables + - [x] Create data dictionary (EDA task) + - [x] Perform data cleaning (EDA task) + - [x] Understand and outline data context + +### Resources/Instructions (Updated) +- [Data source](https://data.lacity.org/Housing-and-Real-Estate/LAHD-Affordable-Housing-Projects-List-2003-to-Pres/mymu-zi3s) for affordable housing locations (updated monthly). +- [Visualization](https://data.lacity.org/Housing-and-Real-Estate/LAHD-Affordable-Housing-Projects-List-2003-to-Pres/psx4-6drg) of affordable housing locations. 
+- [Food Oasis Wiki](https://github.com/hackforla/food-oasis/wiki) +- [LA Food Bank](https://www.lafoodbank.org/find-food/pantry-locator/) +- [LA County Food Distribution Sources and Programs](http://publichealth.lacounty.gov/nut/food-distribution-resources.htm) +- [Chirp LA](https://www.chirpla.org/sites/chirpla.org/files/u532/Food%20Bank%20Guide%202021_4.pdf) PDF containing active food banks in LA +" +180,"City of Los Angeles Arrests","role: data science,epic,size: 3pt,project: EDA","open","dolla24","None",11,"2023-08-23T01:36:50Z","2024-03-19T01:54:19Z","None","CONTRIBUTOR","### Overview +We want to analyze arrest data for the city of Los Angeles, and incorporate data from other sources, to determine its suitability for further analysis by Hack for LA. More information about the data, from the [Controller's office website](): +- The LAPD arrest types fall into five categories identified by the LAPD: (1) felony, (2) misdemeanor, (3) infractions, (4) dependent, and (5) other. +- Under California law, a felony is a crime that is punishable with death, by imprisonment in the state prison, +or . . . by imprisonment in a county jail under [certain provisions]. Every other crime or public offense is a misdemeanor except those offenses that are classified as infractions. Misdemeanors and infractions carry varying degrees of financial and incarceration consequences. +- LAPD makes more arrests for misdemeanor and infraction offenses than for felonies. + - 2019: 55,954 misdemeanor & infraction arrests vs. 33,663 felony arrests + - 2020: 34,659 misdemeanor & infraction arrests vs. 31,015 felony arrests + - 2021: 33,179 misdemeanor & infraction arrests vs. 32,597 felony arrests + - 2022: 30,431 misdemeanor & infraction arrests vs. 30,378 felony arrests + +### Action Items +Phase 1 +- [x] Find available data sources and add to Resources section +- [x] #194 +- [ ] Determine is this is one-time or ongoing project (and assign appropriate label) +- [ ] Write one-sheet (see Resources below) + - [ ] Define stakeholder + - [ ] Summarize project, including value add + - [ ] Define project 6 month roadmap + - [ ] Detail history (if any) +- [x] Define tools to be used for analysis and visualization (if applicable) +- [ ] Create issues required to fulfill project requirements, including exploratory data analysis, required tasks, and deliverables + - [ ] Perform data cleaning (EDA task) + - [ ] Understand and outline data context + +### Resources/Instructions +[Arrest data for 2022 csv file](https://drive.google.com/drive/folders/1QSOWZxUZWQzEsERrtoI3SiFsAUZruZzk) +[Arrest data from the City of Los Angeles](https://www.kaggle.com/datasets/cityofLA/los-angeles-crime-arrest-data +) +Check #178 for updates on whether a real time source for this data have been found +" +179,"CoP: Data Science: City of Los Angeles Evictions","role: data science,epic,size: 3pt,project: EDA","closed","rahul897","None",14,"2023-08-23T01:29:03Z","2024-07-09T21:45:53Z","2024-07-09T21:45:53Z","CONTRIBUTOR","### Prerequisite(s) +If you would like to work on this issue, please add a comment below and include the following information: +- Your name +- How many hours you can commit to working on this in the next week (minimum of 2) +- Commit to providing an update with a comment before the next community of practice meeting + +For example: +- John Doe +- I can commit to working on this issue 3 hours in the following week. +- Yes, I will provide an update on my progress with a comment below. 
+ +Once you have done this, please add yourself to the “Assignees” section on the right and update the issue weekly to document your progress. + +### Overview +We want to analyze eviction data for the city of Los Angeles, and incorporate data from other sources, to determine whether there are actions local leaders can take to address the problem. The following background information is from the [LA Controller's website](https://controller.lacity.gov/landings/evictions): +- August 1, 2023 – rent owed from March 1, 2020 to August 31, 2020 is due. If the Declaration of COVID-19-Related Financial Distress form was returned to the landlord within 15 days of rent being due, they cannot be evicted for nonpayment of rent. +- February 1, 2024 – rent owed from October 1, 2021 to January 31, 2023 is due. If a tenant returned the Declaration of COVID-19-Related Financial Distress form to the landlord within 15 days of rent being due AND paid 25% of rent owed from this period, they cannot be evicted for nonpayment of rent. +- However, since March 27, 2023, landlords may not evict a tenant who falls behind in rent unless the tenant owes an amount higher than the Fair Market Rent (FMR). The FMR depends on the bedroom size of the rental unit. + +### Action Items +Phase 1 +- [ ] Find available data sources and add to Resources section +- [ ] Perform Exploratory Data Analysis (read more [here](https://www.analyticsvidhya.com/blog/2021/08/how-to-perform-exploratory-data-analysis-a-guide-for-beginners/) + - [ ] Create data dictionary (EDA task) + - [ ] Perform data cleaning (EDA task) + - [ ] Understand and outline data context +- [ ] Determine is this is one-time or ongoing project (and assign appropriate label) +- [ ] Write one-sheet (see Resources below) + - [ ] Define stakeholder + - [ ] Summarize project, including value add + - [ ] Define project 6 month roadmap + - [ ] Detail history (if any) +- [ ] Define tools to be used for analysis and visualization (if applicable) +- [ ] Create issues required to fulfill project requirements, including exploratory data analysis, required tasks, and deliverables + + +### Resources/Instructions +[Feb 2023 - July 2023 eviction data csv file](https://drive.google.com/drive/folders/1uyPtg1MNX5LIDwQkFtErmIQJNe9N7X25) +Check #178 for updates on whether a real time source for this data have been found +" +178,"CoP: Data Science: Find data sources through the LA Controller's office","documentation,good first issue,role: data analysis,size: 1pt,project: missing","open","FragariaChemist","None",4,"2023-08-23T01:14:06Z","2024-07-09T21:50:07Z","None","CONTRIBUTOR","### Prerequisite(s) +Skills: Online research + +If you would like to work on this issue, please add a comment below and include the following information: +- Your name +- How many hours you can commit to working on this in the next week (minimum of 2) +- Commit to providing an update with a comment before the next community of practice meeting + +For example: +- John Doe +- I can commit to working on this issue 3 hours in the following week. +- Yes, I will provide an update on my progress with a comment below. +Once you have done this, please add yourself to the “Assignees” section on the right and update the issue weekly to document your progress. + +### Overview +We want to build a library of various local data sources so that we can initiate new data science projects for volunteers that will be useful for the local community. The LA Controller's office seems to have quite a few. 
+ +### Action Items +- [ ] Visit the LA City Controller's website (link below in resources) and create a list of data sets available. +- [ ] Find contact information from the Controller's website and get in touch with someone at the office to determine if real time data is available for the public to access. Examples are eviction and LAPD arrest data, which we have historical data for (links below). +- [ ] Document any correspondence with city representatives in a comment below. +- [ ] If real time data is available, add links to the Resources section. + +### Resources +[LA City Controller Website](https://controller.lacity.gov/data) - In the ""Data Sites"" section there are various resources that should be looked at to determine which may be of interest to Hack for LA +[Google Doc of 2022 LAPD Arrests](https://docs.google.com/spreadsheets/d/1rH272RxRxU8ut3xArEnyOz2j_Qlw5ntEoTJlZwcx_Ng/edit#gid=1936642723) - We should see if this arrest data is available in real time. +[Google Doc of 2023 Eviction Data](https://docs.google.com/spreadsheets/d/1d6xgd95m0v914YTePb8m-9Ir9SVKNKx4-Xw8N_4I0pk/edit#gid=1331260170) - Another data set to check if real time data is available. +" +177,"Create 311 data CSV files that can be accessed through a Jupyter notebook","role: data science,project: 311 Data,size: 3pt,feature: project management","closed","mru-hub","None",3,"2023-03-31T20:57:06Z","2024-06-16T06:27:37Z","2024-04-16T02:32:54Z","CONTRIBUTOR","### Overview +We want to download 311 data and split by year, then month, so each is under 100MB and we can host tan append-only data warehouse on GitHub. + +### Action Items +- [x] Get cleaning rules from the 311-data repo and add a link to the rules to Resources below. +- [x] Get city data +- [x] Split by year, then by month +- [ ] Outline what you did to clean the data in a comment below +- [ ] Create Jupyter notebook to access the data and add notes explaining the cleaning rules +- [ ] Create a website (ideally ghpages) that can display the jupyter notebook so that people don't have to know how to download and install one. + +### Resources/Instructions + +**Cleaning Rules**: https://github.com/hackforla/data-science/blob/main/311-data/CSV_files/Docs/CleaningRules.txt +**City Data:**: https://data.lacity.org/browse?q=311%20data%20%2C%202024&sortBy=relevance (Please update the filter for the year 2024 based on the requirements.) + + + + + + +) + +" +170,"California Grants Portal Data Set EDA","role: data analysis,project duration: ongoing,size: 1pt,project: EDA","closed","jossus657","None",9,"2022-07-15T17:08:16Z","2022-11-08T21:32:03Z","2022-11-08T21:32:03Z","CONTRIBUTOR","### Overview +We want to analyze the California Grants Portal Dataset to determine the feasibility of building a tool for public use. + +### Action Items +- [x] Perform exploratory data analysis [EDA] on the Grants Portal dataset. + - [x] See EDA section of tutorial [here](https://github.com/hackforla/data-science/wiki/Intro-to-Data-Analysis-With-Python) +- [x] Attach findings (write-ups, notebooks, etc.) to this issue. 
+ +### Resources/Instructions +[California Grants Portal Website](https://data.ca.gov/dataset/california-grants-portal) +[Data Analysis tutorial with EDA instructions](https://github.com/hackforla/data-science/wiki/Intro-to-Data-Analysis-With-Python) +" +169,"Potential Projects: California Grants Portal","role: data science,epic,size: epic,dependency,project: EDA","closed","mihikasahani","None",7,"2022-07-15T16:58:19Z","2024-02-13T03:48:56Z","2024-02-13T03:48:51Z","CONTRIBUTOR","### Dependency +- [x] EDA on a data set in progress + +### Overview +We want to explore this data set to determine if it is something we can build a public resource for. + +### Action Items +- [x] Find available data sources and add to Resources section +- [ ] Determine is this is one-time or ongoing project (and assign appropriate label) +- [ ] Write one-sheet (see Resources below) + - [ ] Define stakeholder + - [ ] Summarize project, including value add + - [ ] Define project 6 month roadmap + - [ ] Detail history (if any) +- [ ] Define tools to be used for analysis and visualization (if applicable) +- [ ] Create issues required to fulfill project requirements, including exploratory data analysis, required tasks, and deliverables + - [ ] Create data dictionary (EDA task) + - [ ] Perform data cleaning (EDA task) + - [ ] Understand and outline data context + +### Resources/Instructions + +- [California Grants Portal Website](https://data.ca.gov/dataset/california-grants-portal) +- EDA: #170 +- [How to write a onesheet for your project](https://docs.google.com/document/d/14-nYNCjrMrC8lucUNBfLOFQZk2sWbkRlJgigWDAlqlI/edit#heading=h.z4tpbshl55b9)" +163,"CoP: Data Science: Complete Intro to Command Line and Git Tutorial","feature: guide,role: data science,size: 1pt,role: Data Engineer","open","None","None",0,"2022-06-03T18:26:54Z","2024-06-18T21:57:35Z","None","CONTRIBUTOR","### Overview +We want to move an existing Hack for LA tutorial to our repository + +### Action Items +- [ ] Look through the repository to see what content needs to be moved to the tutorials page +- [ ] Add content to tutorial + +### Resources/Instructions +[Original repository](https://github.com/hackforla/intro-cli-git-github) +[Data Science Command Line and Git Tutorial Page](https://github.com/hackforla/data-science/wiki/Introduction-to-Commandline,-Git-and-Github) +" +162,"CoP: Data Science: Complete Intro to Python Tutorial","feature: guide,role: data science,size: 1pt,draft,ready for product","open","None","None",2,"2022-06-03T18:23:12Z","2024-06-18T21:57:38Z","None","CONTRIBUTOR","### Overview +We want to move an existing Hack for LA tutorial to our repository + +### Action Items +- [ ] fork the repository to Hack for LA +- [ ] Look through the repository and our tutorial page to see what content needs to be added +- [ ] Fork repo to your github, using hackforla's version as the upstream +- [ ] Add content to tutorial +- [ ] Commit it to our repo +- [ ] reach out to Nathan on this issue and find out if he wants us to do a PR against his repo. 
+ +### Resources/Instructions +- [Original repository](https://github.com/ndanielsen/intro-python) +- [Data Science Python Tutorial Page](https://github.com/hackforla/data-science/wiki/Introduction-to-Programming-with-Python) +" +161,"Project Name: Lucky Parking","feature: missing,role: data analysis,size: 1pt,project: missing","closed","None","None",2,"2022-05-16T23:31:23Z","2022-07-08T19:37:14Z","2022-06-03T23:13:17Z","MEMBER","Volunteer Opportunity:" +157,"CoP: Data Science: Create Deep Learning Tutorial","documentation,feature: guide,role: data science,size: 1pt","open","elliealbertson","None",1,"2022-04-01T19:26:34Z","2024-07-07T19:24:44Z","None","CONTRIBUTOR","### Overview +Update the [Deep Learning](https://github.com/hackforla/data-science/wiki/Deep-Learning) page with resources and an article header. + +### Action Items +- [ ] Create a Google Doc in the folder provided under resources + - [x] Create preliminary outline + - [ ] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft + - [ ] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA +- [ ] Review the draft with the Data Science CoP + - [ ] Make needed updates +- [ ] Add to the wiki page + +### Resources/Instructions +#### Wiki page +[Deep Learning Tutorial](https://github.com/hackforla/data-science/wiki/Deep-Learning) + +#### Location for any files you might need to upload (drafts, images, etc.) +- [Folder for files related to the Deep Learning tutorial](https://drive.google.com/drive/folders/1j_txrPW6uf6MjKTYqw_K1sIUtsZvbsLy) + - [DS: Deep Learning Tutorial Google Doc](https://docs.google.com/document/d/1YME-GyyStpl9QO3zKD-gatUgg1womzZjsdeYDaom71A/edit) + +#### Tools that are core that should be mentioned: +- PyTorch +- Keras/TensorFlow +- HuggingFace + +#### Examples of resources that would be useful to include: +- Web how-to/tutorial/walk-throughs +- Youtube playlists or videos demonstrating tools +- Links to blogs or platforms with subject matter experts +" +156,"CoP: Data Science: Create Machine Learning Tutorial","documentation,feature: guide,role: data science,size: 1pt","open","BhavanaSai12","None",0,"2022-04-01T19:24:06Z","2024-06-25T03:48:00Z","None","CONTRIBUTOR","### Overview +Update the [Machine Learning](https://github.com/hackforla/data-science/wiki/Machine-Learning) page with resources and an article header. + +### Action Items +- [x] Create a Google Doc in the folder provided under resources + - [x] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA + - [x] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft +- [ ] Review the draft with the Data Science CoP +- [ ] Add to the wiki page + +### Resources/Instructions +#### Wiki page +[Machine Learning Tutorial](https://github.com/hackforla/data-science/wiki/Machine-Learning) + +#### Location for any files you might need to upload (drafts, images, etc.) 
+- [Folder for files related to the Machine Learning tutorial](https://drive.google.com/drive/folders/1Z7ei04hVHBFVCN484_yoqAgQFz8XP3Rz) + - [DS: Machine Learning Tutorial Google Doc](https://docs.google.com/document/d/1MVabfo0fhJQ3-9wQlDn9NLnV0ZvNzP3Fu014Guj_deA/edit) + +#### Tools that are core that should be mentioned: +- XGBoost +- Random Forest + +#### Examples of resources that would be useful to include: +- Web how-to/tutorial/walk-throughs +- Youtube playlists or videos demonstrating tools +- Links to blogs or platforms with subject matter experts + +" +155,"CoP: Data Science: Create Stats Tutorial","documentation,feature: guide,role: data science,role: data analysis,size: 1pt","open","None","None",0,"2022-04-01T19:21:26Z","2024-06-18T21:57:20Z","None","CONTRIBUTOR","### Overview +Update the [Stats](https://github.com/hackforla/data-science/wiki/Almighty-Statistics) page with resources and an article header. + +### Action Items +- [ ] Create a Google Doc in the folder provided under resources + - [ ] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA + - [ ] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft +- [ ] Review the draft with the Data Science CoP +- [ ] Add to the wiki page + +### Resources/Instructions +#### Wiki page +[Stats Tutorial](https://github.com/hackforla/data-science/wiki/Almighty-Statistics) + +#### Location for any files you might need to upload (drafts, images, etc.) +- [Folder for files related to the Stats tutorial](https://drive.google.com/drive/folders/1MLPi4eo6BaurUDh-fIBW85ZKPE3jDERu) + - [DS: Stats Tutorial Google Doc](https://docs.google.com/document/d/1FMbaMeVRcVQPiIGW3QvO1ah8ocptNFwLiQjTZ-RUhsU/edit) + +#### Skills that are core that should be mentioned: +- Logistic/Linear Regression +- Experimental Design +- Significance Testing +- Bayesian Analysis + +#### Examples of resources that would be useful to include: +- Web how-to/tutorial/walk-throughs +- Youtube playlists or videos demonstrating tools +- Links to blogs or platforms with subject matter experts + +" +154,"CoP: Data Science: Create Data Ops Tutorial","documentation,feature: guide,size: 1pt,role: Data Engineer","open","None","None",1,"2022-04-01T19:18:14Z","2024-06-18T21:57:17Z","None","CONTRIBUTOR","### Overview +Update the [Data Ops](https://github.com/hackforla/data-science/wiki/Data-Ops) page with resources and an article header. + +### Action Items +- [ ] Create a Google Doc in the folder provided under resources + - [ ] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA + - [ ] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft +- [ ] Review the draft with the Data Science CoP +- [ ] Add to the wiki page + +### Resources/Instructions +#### Wiki page +[Data Ops Tutorial](https://github.com/hackforla/data-science/wiki/Data-Ops) + +#### Location for any files you might need to upload (drafts, images, etc.) 
+- [Folder for files related to the Data Ops tutorial](https://drive.google.com/drive/folders/1j5r7SDEHAUhUbI6DDD3MxvobPfU9WiH-) + - [DS: Data Ops Tutorial Google Doc](https://docs.google.com/document/d/15WdOrTtKGuZL3rPuQm3h4SEWuUL4vML0XtPas8fBIwc/edit) + +#### Tools that are core that should be mentioned: +- EC2 +- Lambda +- RDS +- Athena/Hive +- Flask + +#### Examples of resources that would be useful to include: +- Web how-to/tutorial/walk-throughs +- Youtube playlists or videos demonstrating tools +- Links to blogs or platforms with subject matter experts" +153,"CoP: Data Science: Create Text Analysis Tutorial","documentation,feature: guide,role: data analysis,size: 1pt","open","bfang22","None",3,"2022-04-01T19:15:05Z","2024-06-18T21:59:08Z","None","CONTRIBUTOR","### Overview +Update the [Text Analysis](https://github.com/hackforla/data-science/wiki/Text-Analysis) page with resources and an article header. + +### Action Items +- [ ] Create a Google Doc in the folder provided under resources + - [ ] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA + - [ ] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft +- [ ] Review the draft with the Data Science CoP +- [ ] Add to the wiki page + +### Resources/Instructions +#### Wiki page +[Text Analysis Tutorial](https://github.com/hackforla/data-science/wiki/Text-Analysis) + +#### Location for any files you might need to upload (drafts, images, etc.) +- [Folder for files related to the Text Analysis tutorial](https://drive.google.com/drive/folders/1HApzgkLXfsRgzCrIKWFI08m5NfkWxz9W) + - [DS: Text Analysis Tutorial Google Doc](https://docs.google.com/document/d/1-UTjQ_lonjEp1lZ0agjtEgP1sGYUSEvGgKij97qY7A8/edit) + +#### Tools that are core that should be mentioned: +- nltk +- SpaCy + +#### Examples of resources that would be useful to include: +- Web how-to/tutorial/walk-throughs +- Youtube playlists or videos demonstrating tools +- Links to blogs or platforms with subject matter experts +" +152,"CoP: Data Science: Open Source Projects Data Set for Climate projects","role: data analysis,size: 1pt,epic: GreenEarthOS,project: climate projects,draft","open","noneill256","None",12,"2022-03-18T21:04:03Z","2024-06-18T21:59:30Z","None","CONTRIBUTOR","### Overview +We want to monitor various open source projects and create a script that will keep our listing up-to-date with information such as last active. + +### Action Items +- [x] Add each project's URL to the Project URL column (You can see that if you hover over the Project name and description cells. You can also try to pull it from the Project URLs link in Resources below) +- [x] Create a script that will visit each URL and update the following columns (Note that you may have to work around +API limits): + - [x] Project URL + - [x] Last Active Date (Date) + - [x] Number of contributors + - [ ] Connect with volunteer who developed script to copy files over to Hack for LA repository. Code is [here](https://github.com/noneill256/Hack4LA-Project-Spreadsheet-Updater?tab=readme-ov-file). + - [ ] Assess current script and make necessary edits so it can be automated to run and produce output periodically. 
+ - [ ] Develop a data dashboard to display information in a useful way + +### Resources/Instructions +Spreadsheet of open source projects we want to monitor [here](https://docs.google.com/spreadsheets/d/1LFResU_pcP5IMwz92dmPQRoKJ4lNa3tvr-_COJiE_hc/edit#gid=0) +Original source for project URLs [here](https://opensustain.tech/) +[Info about getting URL from a Google Sheet cell](https://www.oksheets.com/extract-hyperlink-url/#:~:text=Extract%20the%20URL%20from%20a%20Hyperlink%20by%20Edit%20and%20Copy&text=You%20can%20right%2Dclick%20on,option%20from%20the%20Insert%20menu.) +" +149,"Weekly Label Check","role: product,size: 1pt,feature: project management","open","None","None",17,"2022-02-25T21:38:09Z","2024-03-29T19:41:34Z","None","MEMBER","### Dependency +2022-02-24 + +### Overview +We need to perform a weekly check on issues to make sure they all conform to our label and milestone schema. + +### Action Items +- [ ] Check to see if there are any issues that have not yet been added to [project board](https://github.com/hackforla/data-science/issues?q=is%3Aissue+is%3Aopen+no%3Aproject) + - [ ] if yes, add to project board +- [ ] Check the following to make sure that they are all 0/0 (Links for checking to make sure that all issues either have a ___ or a ____ missing label) + - [feature and project](https://github.com/hackforla/data-science/issues?q=-label%3A%22project%3A+health+services%22+-label%3A%22project%3A+metro+311+correlation%22+-label%3A%22project%3A+district+types%22+-label%3A%22project%3A+structured+context%22+-label%3A%22project%3A+311-data-dashboards%22+-label%3A%22project%3A+311+Data%22+-label%3A%22project%3A+missing%22+-label%3A%22project%3A+health+services%22+-label%3A%22project%3A+metro+311+correlation%22+-label%3A%22project%3A+structured+context%22+-label%3A%22project%3A+311-data-dashboards%22+-label%3A%22project%3A+311+Data%22+-label%3A%22project%3A+seie%22+-label%3A%22project%3A+OCS%22+-label%3A%22project%3A+missing%22+is%3Aissue+-label%3A%22feature%3A+project+management%22+-label%3A%22feature%3A+guide%22+-label%3A%22feature%3A+onboarding%22+-label%3A%22feature%3A+recruitment%22+-label%3A%22feature%3A+missing%22+-label%3A%22feature%3A+labels%22+-label%3A%22project%3A+Native+Land+Attribution%22+-label%3A%22project%3A+LAANE%22+-label%3A%22project%3A+Climate+Collabathon%22+-label%3A%22project%3A+climate+projects%22+-label%3A%22feature%3A+branding%22+-label%3A%22project%3A+EDA%22+-label%3A%22project%3A+Data+Science+CoP+Maintenance%22+-label%3A%22project%3A+MC+Southern+Africa+NGOs%27+OS+usage%22+is%3Aopen) The feature and project missing tags are mutually exclusive. Please use the correct label. 
+ - [role](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+-label%3A%22role%3A+product%22+-label%3A%22role%3A+CoP+lead%22+-label%3A%22role%3A+data+analysis%22+-label%3A%22role%3A+data+science%22+-label%3A%22role%3A+Data+Engineer%22+-label%3A%22role%3A+missing%22+-label%3A%22role%3A+org%22) + - [size](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+-label%3A%22size%3A+epic%22+-label%3A%22size%3A+missing%22+-label%3A%22size%3A+1pt%22+-label%3A%22size%3A+2pt%22+-label%3A%22size%3A+3pt%22+-label%3A%22size%3A+5pt%22+-label%3A%22size%3A+8pt%22+-label%3A%22size%3A+13pt%22+-label%3A%22size%3A+0.5pt%22+-label%3A%22size%3A+0.25pt%22) + - [ ] Add any label missing (e.g., a new feature label go added to the project, but didn't yet get added here) and update link +- [ ] if changes were made, recheck to make sure all links ended up 0/0 +- [ ] Audit for issues with Missing labels (to determine if we are making progress) + - [ ] Copy this audit report format to a comment below +``` +Date: +- [ ] Did you have to update the 0/0 links above? if yes, please say what you did (e.g, Added features to the features check above or added an issue to the project board that has not been added yet). +- [ ] Report what the numbers are on issues that are missing labels (e.g., size, ending at 12/5) + - [ ] feature, ending at + - [ ] project, ending at + - [ ] role, ending at + - [ ] size, ending at + - [x] milestone, ending at (don't have yet, go ahead and skip) +``` + - [ ] perform audit, adding your numbers to your audit report comment + - [feature](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+label%3A%22feature%3A+missing%22) + - [project](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+label%3A%22project%3A+missing%22) + - [role](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+label%3A%22role%3A+missing%22) + - [size](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+label%3A%22size%3A+missing%22) + - [epic](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+label%3A%22epic%3A+missing%22) + +### Epic + - [epic](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+-label%3A%22epic%3A+missing%22+-label%3A%22epic%3A+potential+projects%22+-label%3A%22epic%22+-label%3A%22epic%3A+311+public+data+sets%22+-label%3A%22epic%3A+internal+projects%22+-label%3A%22epic%3A+empowerla.org%22+) + + +#### Icebox issue +- [ ] [Icebox Issues w no dependency](https://github.com/hackforla/data-science/projects/1?card_filter_query=-label%3Adependency#column-139348530)" +160,"CoP: Data Science: Survey: Repo Labels","project duration: ongoing,size: 2pt,time sensitive,role: Data Engineer,feature: labels,feature: Issues Dashboard","closed","jossus657","time sensitive",31,"2022-02-13T23:40:23Z","2024-06-18T21:59:39Z","2023-03-31T19:20:56Z","MEMBER","### Overview +We need to survey labels across the organization so that we can rationalize and do automation and org-wide audits. + +#### Additional Details +We already have an automation running on the github.com/hackforla/website repo that adds labels that start with ""missing:"" and let the user know what other labels are required. The user can still add optional labels, but they must use the minimum. We want to roll this automation out to all the teams, but in order to do so, they must all be uses the minimum labeling in the same way. 
+ +We have a [kanban guide](https://docs.google.com/document/d/11Fe7mNdmPBP5bD_yLJ1C0_I1TmoK47AuHHrdhdDyWCs/), but it's confusing to users if all the projects don't use similar labels, so we want to have a base set of labels that will be documented in our instructions. + +### Action Items +- [x] Define data schema +- [x] Create spreadsheet +- [x] Write a script that gets a list of all labels for each repo across all orgs controlled by hack for la +- [x] Find main label types +- [ ] Analysis by appearance +- [ ] Release dependency on hackforla/ops#15 + +### Resources/Instructions +#### Orgs to poll +- Hackforla +- 100automations +- civictechindex +- civictechstructure +- hackla-engage + +#### Data Schema +- name of org +- name of repo +- title of label +- description of label +- type of label (role, size, priority, difficulty/complexity, status, feature, p-feature, etc) +- number of issues per label + +### Resources +- [DS Google Drive, label audit folder](https://drive.google.com/drive/u/0/folders/1l8bMfhmUPG1O3nCLgvIE7jd_HlqAIb1s) +- [combined_csv - google sheet](https://docs.google.com/spreadsheets/d/1KOCPa4KPE9GPwVdwkUCyvXwLLqUF2TP2rDlOodN62xc/edit#gid=614668317) +- [Last output from 8/2/22: label.csv](https://github.com/hackforla/data-science/files/9245408/label.csv)" +148,"Create Geospatial Data Analysis Tutorial","documentation,feature: guide,role: org,size: 1pt","closed","mcmorgan27","None",4,"2022-02-04T04:46:32Z","2022-03-25T16:08:03Z","2022-03-25T16:08:03Z","CONTRIBUTOR","### Overview +Update the [Geospatial Data Analysis](https://github.com/hackforla/data-science/wiki/Geospatial-Data-Analysis) with resources and an article header. + +### Action Items +Update the Tutorial with: +- An introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA +- A resources section listing vetted tutorials covering important skills within the tutorial area + +### Resources/Instructions +Tools that are core that should be mentioned: +- Geopandas +- Shape files +- Coordinate Systems +- GIS +- Data sources + +Examples of resources that would be useful to include: +- Web how-to/tutorial/walk-throughs +- Youtube playlists or videos demonstrating tools +- Links to blogs or platforms with subject matter experts +" +147,"Create Data Engineering Tutorial","documentation,feature: guide,role: org,size: 1pt","closed","jonmelvin","None",9,"2022-01-21T05:33:37Z","2022-05-12T20:56:34Z","2022-05-12T20:56:34Z","CONTRIBUTOR","### Overview +We need to add content to the Data Engineering tutorial wiki with resources and an article header, so that new data scientist joining our organization will have them for working with data at Hack for LA. + +### Action Items +- [ ] Create a Google Doc in the folder provided under resources + - [ ] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA + - [ ] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft +- [ ] Review the draft with the Data Science CoP +- [ ] Add to the wiki page + +### Resources/Instructions +#### Wiki page +[Data Engineering Tutorial](https://github.com/hackforla/data-science/wiki/Data-Engineering) + +#### Location for any files you might need to upload (drafts, images, etc.) 
+- [Folder for files related to the Data Engineering tutorial](https://drive.google.com/drive/folders/1tgnZPI7XJzu0xTkliBnMN8dswnQ_5Q97) + - [DS: Data Engineering Tutorial Google Doc](https://docs.google.com/document/d/1CqEiUtNMs-l83tiIwvciKx9FCkj3fgBhJ-Hmj7kEGFc/edit) + +#### Tools that are core that should be mentioned: +- SQL +- NoSQL + +#### Examples of resources that would be useful to include: +- Web how-to/tutorial/walk-throughs +- YouTube playlists or videos demonstrating tools +- Links to blogs or platforms with subject-matter experts +" +146,"Create Web Scraping Tutorial","documentation,feature: guide,role: org,size: 1pt","open","parcheesime","None",14,"2022-01-21T05:31:23Z","2022-10-14T03:13:48Z","None","CONTRIBUTOR","### Overview +Update the [Web Scraping](https://github.com/hackforla/data-science/wiki/Webscraping) with resources and an article header. + +### Action Items +- [x] Create a Google Doc in the folder provided under resources + - [x] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA + - [x] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft +- [x] Review the draft with the Data Science CoP +- [x] Add to the wiki page + +### Resources/Instructions +#### Wiki page +[Web Scraping Tutorial](https://github.com/hackforla/data-science/wiki/Webscraping) + +#### Location for any files you might need to upload (drafts, images, etc.) +- [Folder for files related to the Web Scraping tutorial](https://drive.google.com/drive/folders/13GEPhfLPk4MCpEIpzNwtwJBqivmh4etS) + - [DS: Web Scraping Tutorial Google Doc](https://docs.google.com/document/d/15774XE_xw7sDIjUmW95ThttgYFQ-1OVFHWpjOETNH2I/edit) + +#### Tools that are core that should be mentioned: +- Python +- Selenium +- BeautifulSoup +- Requests + +Consider linking to [these videos Sophia made](https://drive.google.com/drive/folders/1fWIRwKI6vUj2RT8YPFHXG9eHMdNSfty_) + +#### Examples of resources that would be useful to include: +- Web how-to/tutorial/walk-throughs +- Youtube playlists or videos demonstrating tools +- Links to blogs or platforms with subject matter experts +" +145,"CoP: Data Science: Create Data Analysis With R Tutorial","documentation,feature: guide,role: data analysis,size: 1pt","open","xuyeluo","None",5,"2022-01-21T05:27:52Z","2024-07-09T00:07:17Z","None","CONTRIBUTOR","### Overview +Update the [Data Analysis With R](https://github.com/hackforla/data-science/wiki/Data-Analysis-With-R) with resources and an article header. + +### Action Items +- [ ] Create a Google Doc in the folder provided under resources + - [ ] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA + - [ ] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft +- [ ] Review the draft with the Data Science CoP +- [ ] Add to the wiki page + +### Resources/Instructions +#### Wiki page +[Data Analysis with R Tutorial](https://github.com/hackforla/data-science/wiki/Data-Analysis-With-R) + +#### Location for any files you might need to upload (drafts, images, etc.) 
+- [Folder for files related to the Data Analysis tutorial](https://drive.google.com/drive/folders/1vwqkqIZsElTmQm36pPmAvsTlRCjHkZWo) + - [DS: Data Analysis with R Tutorial Google Doc](https://docs.google.com/document/d/1JfpbJDxPyzFusHrxWh57wV1kqnb5i6KFloXWCSFcG28/edit) + +#### Tools that are core that should be mentioned: +- Tidyverse +- CRAN + +#### Examples of resources that would be useful to include: +- Web how-to/tutorial/walk-throughs +- Youtube playlists or videos demonstrating tools +- Links to blogs or platforms with subject matter experts +" +144,"Create Data Visualization Tutorial","documentation,feature: guide,role: org,size: 1pt,feature: needs peer review","closed","None","None",4,"2022-01-21T05:22:40Z","2022-05-05T18:10:49Z","2022-05-05T18:10:34Z","CONTRIBUTOR","### Overview +Update the [Data Visualization Tutorial](https://github.com/hackforla/data-science/wiki/Data-Visualization) with resources and a header explaining + +### Action Items +- [x] Create a Google Doc in the folder provided under resources + - [x] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA + - [x] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft +- [x] Review the draft with the Data Science CoP +- [x] Add to the wiki page + +### Resources/Instructions +#### Wiki page +[Data Visualization Tutorial](https://github.com/hackforla/data-science/wiki/Data-Visualization) + +#### Location for any files you might need to upload (drafts, images, etc.) +- [Folder for files related to the Data Visualization tutorial](https://drive.google.com/drive/folders/1IiTuyJlJ3o1KRVXh8l9cpjrPu7zSVm1s) + - [DS: Data Visualization Tutorial Google Doc](https://docs.google.com/document/d/1umiXKgA0qnK3VpArEfIkENq-Nm4YJHTMzHE7laHKCRE/edit) + +#### Tools that are core that should be mentioned: +- Pandas +- Seaborn +- Matplotlib +- Tableau + +#### Examples of resources that would be useful to include: +- Web how-to/tutorial/walk-throughs +- Youtube playlists or videos demonstrating tools +- Links to blogs or platforms with subject matter experts +" +143,"Create ETL/Data Cleaning Tutorial","documentation,feature: guide,role: org,size: 1pt","closed","ryanmswan","None",2,"2022-01-21T05:19:10Z","2023-03-31T19:14:05Z","2023-03-31T19:14:05Z","CONTRIBUTOR","### Overview +Update the [ETL/Data Cleaning Tutorial](https://github.com/hackforla/data-science/wiki/ETL-Data-Cleaning-Resources) with resources and a header explaining + +### Action Items +Create [wiki page in the data-science repo](https://github.com/hackforla/data-science/wiki) for the listed subject +Update the Tutorial with: +- An introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA +- A resources section listing vetted tutorials covering important skills within the tutorial area + +### Resources/Instructions +Examples of resources that would be useful to include: +- Web how-to/tutorial/walk-throughs +- Youtube playlists or videos demonstrating tools +- Links to blogs or platforms with subject matter experts +" +141,"Obtain Shape Files for Different District Types (2023)","good first issue,role: data analysis,project duration: one time,dependency,size: 1pt,project: district types","closed","mru-hub","None",2,"2021-12-10T19:43:11Z","2023-07-21T03:58:29Z","2023-07-21T03:58:29Z","CONTRIBUTOR","### Overview +We need to obtain 
new shape files for the various district types that represent stakeholders we work with once they reflect redistricting. This is part of the epic issue #118 and should be done annually until we can automate the process. + +### Action Items +- [x] Save shape files updated in 2022 to the data-science repo. Folder is [here](https://drive.google.com/drive/folders/13oZXXIfYlEzW0LaPL8OhrmGGigBkd8K6) + +- [x] Save shape files updated in 2023 to the Google drive folder [here](https://drive.google.com/drive/folders/1ZQWeoe9O6NdC2KXIZ2WQJbcct7vsvDMS): + - [x] [Neighborhood Councils (NCs)](https://empowerla.org/councils/) | [_Shape Files Here_](https://geohub.lacity.org/datasets/neighborhood-council-boundaries-2018/explore) + - [x] [Supervisory Districts](https://bos.lacounty.gov/About-Us/Board-of-Supervisors) | [_Shape Files Here_](https://egis-lacounty.hub.arcgis.com/datasets/c7d0324969684262a7f01ce9222c8657/explore) + - [x] [Business Improvement Districts (BIDs)](https://clerk.lacity.org/clerk-services/bids) | [_Shape Files Here_](https://data.lacity.org/Housing-and-Real-Estate/Business-Improvement-Districts/733b-ryrk) + - [x] [U.S. House of Representatives that serve LA](http://www.laalmanac.com/government/gu02map.php) | [_Shape Files Here_](https://wedrawthelines.ca.gov/maps-final-draft-congressional-districts/) + - [x] [California State Senators that serve LA](https://www.senate.ca.gov/) | [_Shape Files Here_](https://wedrawthelines.ca.gov/maps-final-draft-senate-districts/) + - [x] [California Assembly members that serve LA](https://www.assembly.ca.gov/assemblymembers) | [_Shape Files Here_](https://wedrawthelines.ca.gov/maps-final-draft-assembly-districts/) + - [x] Zip Code | [_Shape Files Here_](https://hub.arcgis.com/datasets/lacounty::la-county-zip-codes/explore) + - [x] LA Country City Boundaries | [_Shape Files Here_](https://egis-lacounty.hub.arcgis.com/datasets/lacounty::city-boundaries-lines/about) + + Note that some of these (i.e. zip codes) may not be updated with redistricting. + +### Resources/Instructions +Google Drive folder for storing files [here](https://drive.google.com/drive/folders/1KsIfAFmp0ArLauvHY1k9wRc9ZXaPDahe)" +140,"Create a logo for the Data Science CoP","project duration: one time,size: 2pt,feature: branding,role: design","closed","None","None",4,"2021-12-01T07:04:58Z","2024-06-18T02:59:55Z","2022-09-15T22:45:43Z","MEMBER","### Overview + +Create a logo for the Data Science CoP, for use in powerpoint presentations and deliverables. We currently have a draft version in need of refinement. It will likely need to be rebuilt. + +### Action Items + +- [x] Refine current draft of logo + - [x] Simplify visually - fewer elements, no backdrop, better perspective angle + - [x] Find clearer placement for Hack for LA logo within or alongside Data Science logo. + +### Resources/Instructions + +#### Latest Files + +The latest files are saved in this folder in the Data Science shared Google Drive: +https://drive.google.com/drive/u/0/folders/1d0IrLzTGEWP5PkGS0aocLi1tCK2jSt8b + +The latest file is an svg made with google drawing. The [main Base Logo file is here](https://docs.google.com/drawings/d/1ARUH58iSl4_cMo5RyQ8xhPcAmfjnf3uZF9mjcF7iTBU/edit) and the [Favicon file is here](https://docs.google.com/drawings/d/1hb_1iSWR0XYV2bZMOqsKa_7X3hqW-uMDjhryaJ6R-7U/edit). + +##### Previous Draft +The previous draft was made by combining a matplotlib background (for the beloved(?) data science aesthetic) with other elements photoshopped into place. 
The files can be found in the [logo branch](https://github.com/hackforla/data-science/tree/logo) of the repo here: + +- [Backdrop rendered in jupyter notebook](https://github.com/hackforla/data-science/tree/logo/logo/Logo_Elements/3d_backdrop/logo_backdrop_matplotlib.ipynb) +- [Photoshop file](https://github.com/hackforla/data-science/tree/logo/logo/Logo_Assembly_cropped.psd) +- [Draft logo](https://github.com/hackforla/data-science/tree/logo/logo/Drafts/Logo_outlined.png) +- [Alternate draft 1](https://github.com/hackforla/data-science/tree/logo/logo/Drafts/Logo_Assembly_option_1.png) +- [Alternate draft 2](https://github.com/hackforla/data-science/tree/logo/logo/Drafts/Logo_Assembly_option_2.png)" +138,"Starter Project for New Data Analysts and Data Scientists","role: product,role: CoP lead,feature: onboarding,size: epic,dependency","closed","None","None",1,"2021-11-19T20:36:07Z","2021-11-30T19:00:45Z","2021-11-30T19:00:45Z","CONTRIBUTOR","### Dependency +Discuss how this integrates with Hack for LA plans to engage new data scientists and get them working on productive projects. + +### Overview +CoP lead proposed developing a starter project using a toy data set to help new recruits understand various tasks and allow mentors to provide useful feedback based on deeper understanding of the project and data set. + +### Action Items +- [ ] Determine what skills should be used in the starter project. Possibilities include: + - Data cleaning + - Documentation (i.e. adding structured context) + - Technologies (i.e. Jupyter, Python, numPy, SQL,,etc.) + - Summary of findings +- [ ] Collect useful resources and documentation for recruits to refer to +- [ ] Select data set(s) to be used for project +- [ ] Create notebook with step-by-step instructions +- [ ] Get feedback from CoP + +### Resources/Instructions +" +137,"Data Science CoP Meeting Agendas (Monday 7PM PST)","feature: guide,role: product,size: 1pt,feature: agenda","open","akhaleghi","None",40,"2021-11-19T04:37:43Z","2024-03-19T02:10:52Z","None","CONTRIBUTOR","### Overview +This issue tracks the agenda for the Data Science CoP meetings. 
+ +### Weekly Agenda, Recordings, and Transcripts +- November 2021: [2021-11-18](https://github.com/hackforla/data-science/issues/137#issuecomment-973737827), [2021-11-19](https://github.com/hackforla/data-science/issues/137#issuecomment-974421073) +- December 2021: [2021-12-02](https://github.com/hackforla/data-science/issues/137#issuecomment-985213751), [2021-12-16](https://github.com/hackforla/data-science/issues/137#issuecomment-996413245) +- January 2022: [2022-01-06](https://github.com/hackforla/data-science/issues/137#issuecomment-1007118306), [2022-01-13](https://github.com/hackforla/data-science/issues/137#issuecomment-1012720668), [2022-01-20](https://github.com/hackforla/data-science/issues/137#issuecomment-1017927346), [2022-01-27](https://github.com/hackforla/data-science/issues/137#issuecomment-1023838528) +- February 2022: [2022-02-03](https://github.com/hackforla/data-science/issues/137#issuecomment-1029601494), [2022-02-10](https://github.com/hackforla/data-science/issues/137#issuecomment-1035832929), [2022-02-17](https://github.com/hackforla/data-science/issues/137#issuecomment-1043847040), [2022-02-24](https://github.com/hackforla/data-science/issues/137#issuecomment-1050495019) + +### Resources/Instructions +[Zoom link for Thursday meetings](https://us02web.zoom.us/j/81067015817?pwd=M3l6a0tQTWhLbnlTbEZNOWJ5UXN3QT09) +" +135,"CoP: Data Science: Find and document all the 311 public data sets","role: data analysis,project: 311 Data,size: 2pt,epic: 311 public data sets","open","venkata-sai-swathi","None",5,"2021-11-12T21:47:39Z","2024-07-10T16:39:05Z","None","MEMBER","### Overview +We need to create a definitive list of all the city government data portals for the Greater LA area. + +### Action Items +- [x] Add the datasets from the Harker list into the Spreadsheet of cities and counties in Greater LA +- [x] Google to see if there are any others +- [x] find each cities website and record in spreadsheet +- [x] visiting each city's website to confirm they have one or don't have data portal. +- [x] If there are notes about why they don't have it (article, etc). Please provide a link to that info or paste if short. + +### Resources/Instructions +- [Deliverable: Spreadsheet of cities/counties in Greater LA with URLs](https://docs.google.com/spreadsheets/d/1Vjmof0CfFPGyWDlDbChc_hk1MF6pSdGEwcFcAMeQCIQ/edit#gid=0) +- [Wikipedia list of Cities in LA with links to their pages, and each one probably has a link to the official website](https://en.wikipedia.org/wiki/List_of_cities_in_Los_Angeles_County,_California) +- [Harker List - completed](http://www.harker.com/OpenData/socrata-data-portals.html)" +134,"Overview Dashboard - add titles to graphs","feature: guide,role: data science,project: 311-data-dashboards,size: 1pt,epic: 311 public data sets","closed","chelseybeck","None",1,"2021-11-05T02:01:19Z","2022-03-18T20:07:21Z","2021-11-19T04:01:41Z","MEMBER","### Overview +None of the figures on the dashboards currently have titles. This issue is to add titles to the figures on the Overview page on the dev site. 
+ +### Action Items + +- [x] add title to fig1 +- [x] add title to fig2 +- [x] add title to fig3 +- [x] add title to fig4 +- [x] add title to fig5 +- [x] add title to fig6 + + +### Resources/Instructions + +[311-data project onboarding](https://docs.google.com/document/d/1fNIxKJl91YZ_b6PRvKMdyd7tXJwWbfGIAJ5VTfe8Jow/edit?pli=1#heading=h.c8tc614ce3iu) + +Refer to the documentation in the [311-data dashboards directory](https://github.com/hackforla/311-data/tree/dev/server/dash/dashboards) for creating or modifying dashboards + +[Dash Python User Guide](https://dash.plotly.com/) + +Optional: prototype reports in Jupyter Notebooks -examples found [here](https://drive.google.com/drive/u/3/folders/1P-uID8FpnqwvYlUmzWsmgU4pxRSeb72z). +" +133,"Create labels, features, milestones","role: product,dependency,size: 3pt,feature: project management","open","None","None",2,"2021-11-01T23:40:13Z","2024-02-13T03:28:58Z","None","MEMBER","### Dependency +Figure out which (if any) milestones and milestone labels are needed for DS CoP + +### Overview +We need the board to be easy all team members to navigate and use to be effective, so that we can build capacity and velocity + +### Action Items +#### Make labels +- [x] Labels + - [x] features + - [x] roles + - [x] size + - [x] epics +- [x] label check issue for the following #149 + - [x] features + - [x] roles + - [x] size + - [x] epics + +#### Milestones +- [ ] Start spreadsheet +- [ ] meet with Sophia and Ryan +- [ ] create milestones +- [ ] add milestones to all issues +- [ ] re-prioritize all issues in backlog + +### Resources/Instructions +- https://github.com/hackforla/data-science/issues/149" +131,"Story Size for Data Science Issues","role: product,size: 1pt,feature: project management","closed","akhaleghi","None",0,"2021-10-24T18:07:51Z","2021-11-05T20:47:03Z","2021-11-01T23:36:54Z","CONTRIBUTOR","### Overview +This issue explains the meaning of story points that we use for estimating the relative effort required for issues. + +### Resources/Instructions +When creating new issues, add a ""size"" label based on the estimated time effort required to complete the tasks. _Note that each point roughly equates to 6 hours or less of work._ +- size: 1pt - can be done in 6 hours or less +- size: 2pt - can be done in 7-12 hours +- size: 3pt - can be done in 13-18 hours +- size: 5pt - can be done in 19-30 hours +- size: 8pt - can be done in 31-48 hours +- size: 13+pt - must be broken down into smaller issues." +130,"Create a Guide: Web Scraping","Guide: Research,feature: guide,role: org,size: 5pt,CoP: Data Science","open","parcheesime","None",0,"2021-10-22T16:49:52Z","2024-06-05T12:54:03Z","None","CONTRIBUTOR","### Overview +This issue contains resources to help community members learn more about web scraping in Python, including the use of APIs. + +### Action Items +- [ ] Gather resources, including relevant Hack for LA content (i.e. our tutorials and projects that have used web scraping), online courses, and tutorial/how-to web content. 
+ - [ ] Once done, remove the ""Guide: Research"" label and add the ""Guide: Draft Guide"" label +- [ ] Create a draft template, either in markdown format in this issue or a google doc in the [Data Science google drive](https://drive.google.com/drive/folders/1ZBzxr7Wv3IlDsvIM5wAeHflI4op_X01M) + - [ ] Once done, remove the ""Guide: Draft Guide"" label and add the ""Guide: Create Guide"" label +- [ ] Create a guide on how to use the resources contained, including steps on how to get started for volunteers new to web scraping + - [ ] Once done, remove the ""Guide: Create Guide"" label and add the ""Guide: Review Guide"" label +- [ ] Review the guide with Data Science Communities of Practice + - [ ] Once done, remove the ""Guide: Review Guide"" label and add the ""Guide: Leadership Review"" label +- [ ] Present to Hack for LA leadership team for sign off + - [ ] Once approved, remove the ""Guide: Leadership Review"" label and add the ""Guide: Place Guide"" label +- [ ] Include link to guide under Resources if you add it as a template in .github + +### Resources/Instructions +[Hack for LA Web Scraping Tutorial with Selenium/Docker/Python](https://drive.google.com/drive/folders/1t3AO2A8uOcKYrTGKuEzMLnsZPdPUo9Si) +" +124,"Obtain Shape Files for Different District Types as of Nov/Dec 2021","feature: guide,role: data analysis,project duration: one time,size: 1pt,project: district types","closed","None","None",6,"2021-10-14T21:42:29Z","2023-03-31T01:10:05Z","2023-03-31T01:10:04Z","CONTRIBUTOR","### Overview +We need to obtain shape files for the various district types that represent stakeholders we work with. This is part of the epic issue #118 + +### Action Items +- [x] Identify shape file sources for the following district types: + - [x] [Neighborhood Councils (NCs)](https://empowerla.org/councils/) | [_Shape Files Here_](https://geohub.lacity.org/datasets/neighborhood-council-boundaries-2018/explore) + - [x] [Supervisory Districts](https://bos.lacounty.gov/About-Us/Board-of-Supervisors) | [_Shape Files Here_](https://data.lacounty.gov/GIS-Data/Supervisorial-District-2011-/krcx-r26t) + - [x] [Business Improvement Districts (BIDs)](https://clerk.lacity.org/clerk-services/bids) | [_Shape Files Here_](https://data.lacity.org/Housing-and-Real-Estate/Business-Improvement-Districts/733b-ryrk) + - [x] [U.S. House of Representatives that serve LA](http://www.laalmanac.com/government/gu02map.php) | [_Shape Files Here_](https://wedrawthelines.ca.gov/maps-final-draft-congressional-districts/) + - [x] [California State Senators that serve LA](https://www.senate.ca.gov/) | [_Shape Files Here_](https://wedrawthelines.ca.gov/maps-final-draft-senate-districts/) + - [x] [California Assembly members that serve LA](https://www.assembly.ca.gov/assemblymembers) | [_Shape Files Here_](https://wedrawthelines.ca.gov/maps-final-draft-assembly-districts/) + - [x] Zip Code | [_Shape Files Here_](https://data.lacounty.gov/Geospatial/ZIP-Codes/65v5-jw9f) +- [x] Save shape files to the data-science repo + - [x] Neighborhood Councils (NCs) + - [x] Supervisory Districts + - [x] Business Improvement Districts (BIDs) + - [x] U.S. House of Representatives that serve LA + - [x] California State Senators that serve LA + - [x] California Assembly members that serve LA + - [x] Zip Code +- [ ] Develop policy for updating files + - [ ] How often do the boundaries change for the different district types? + - [ ] Neighborhood Councils (NCs) + - [x] Supervisory Districts + - [ ] Business Improvement Districts (BIDs) + - [x] U.S. 
House of Representatives that serve LA + - [x] California State Senators that serve LA + - [x] California Assembly members that serve LA + - [x] Zip Code + - [ ] Can these be self-updating via APIs? + +### Resources/Instructions +Google Drive [files here](https://drive.google.com/drive/folders/1KsIfAFmp0ArLauvHY1k9wRc9ZXaPDahe) " +121,"Template For Creating Epics For Data Science Projects","role: product,size: 1pt,feature: project management","closed","akhaleghi","None",2,"2021-10-01T17:43:55Z","2024-04-15T21:15:51Z","2021-11-02T19:57:39Z","CONTRIBUTOR","### Overview +We need to develop a uniform way to approach large data science projects and make it easier to identify smaller subtasks to facilitate completion. + +### Action Items +- [x] Write draft template +- [x] Consult CoP and org leads +- [x] Add template to repository + +### Resources/Instructions +Example Epics: +Issues #106 , #107 +[DS repo add template section](https://github.com/hackforla/data-science/issues/templates/edit) +" +120,"Structured Context for HfLA created data sets","feature: guide,role: data science,size: epic,project: structured context,TG: Draft Template,CoP: Data Science","open","None","None",4,"2021-09-30T16:31:22Z","2024-06-05T12:54:03Z","None","MEMBER","### Overview +We need to have minimum standards for structured context in data sets we create and or deploy in our projects so that we can help consumers understand the context from which they come. + +### Action Items +- [ ] Identify existing resources for structured context, adding each example as a link in the resources section + - [ ] Once done, remove the ""TG: Gather Examples"" label and add the ""TG: Draft Template"" label +- [ ] Create a draft template, in all the formats the data will come in (json, csv, etc.) and upload to repo as a pr in the folder structured-context + - [ ] Once done, remove the ""TG: Draft Template"" label and add the ""TG: Create Guide"" label +- [ ] Discuss plan with data science team +- [ ] Create a guide on how to use the template + - [ ] Once done, remove the ""TG: Create Guide"" label and add the ""TG: Review Guide"" label +- [ ] Review the guide with product management communities of practice + - [ ] Once done, remove the ""TG: Review Guide"" label and add the ""TG: Leadership Review"" label +- [ ] Present to Hack for LA leadership team for sign off + - [ ] Once approved, remove the ""TG: Leadership Review"" label and add the ""TG: Place Guide"" label +- [ ] Possibly create an issue template on .github + - [ ] Include link to template under resources if you add it as a template in .github + +### Resources +- Refer to https://github.com/hackforla/product-management/issues/123 for how to write a guide and the guide template + + +### Resources/Instructions +[Datasheets for Data Sets](https://arxiv.org/pdf/1803.09010.pdf) +Definition of [data context](https://simplicable.com/new/data-context) +Article on [data in context](https://www.idashboards.com/blog/2018/05/02/data-in-context/) +Adding context to [data visualizations](https://www.yellowfinbi.com/blog/2017/03/spoonful-of-context-helps-data-visualization-go-down) +[Civic Data Library](https://www.civicdatalibrary.org) +[Civic Software Foundation](https://www.civicdatalibrary.org)" +118,"CoP: Data Science: Create district types reusable tool (API, single dataset, etc.)","feature: guide,role: data analysis,epic,size: epic,size: 1pt","open","parcheesime","None",13,"2021-09-28T18:48:35Z","2024-06-27T16:20:53Z","None","MEMBER","### Overview +We need to create a tool so that each 
project at H4LA that renders points on a map can use District Files to help people analyze or view the data. + +## Action Items +- [x] Identify large groups/districts +- [x] Identify links for groups/districts +- [x] Locate and obtain shape files for these districts #124 +- [x] Determine what files types we will make these available (shp, npm, and/or GeoJSON) +- [ ] Put files in GitHub repository so they are available to use in the organization. +- [x] research how we will create a data set out of this info that will be self updating (meaning are there apis for these groups) +- [ ] ... + +## Resources +[Example Neighborhood Council Shape File](https://geohub.lacity.org/datasets/neighborhood-council-boundaries-2018/explore) + +#### Initial Identification of Large Groups/Districts + - City [Neighborhood Councils (NCs)](https://empowerla.org/councils/) + - County [Supervisory Districts](https://bos.lacounty.gov/About-Us/Board-of-Supervisors) + - City [Business Improvement Districts (BIDs)](https://clerk.lacity.org/clerk-services/bids) + - National [U.S. House of Representatives that serve LA](http://www.laalmanac.com/government/gu02map.php) + - State [California State Senators that serve LA](https://www.senate.ca.gov/) + - State [California Assembly members that serve LA](https://www.assembly.ca.gov/assemblymembers)" +94,"Data Science Competitive/Comparative Analysis","feature: guide,role: product,size: 2pt","closed","None","None",0,"2021-09-10T19:27:09Z","2023-03-31T01:16:37Z","2023-03-31T01:16:37Z","CONTRIBUTOR","### Overview +We need to know what competitors have on their sites so that we can use it as a factor when we decide what to put on our site. + +### Action Items + +- [x] Examine competitor websites to determine list of features that should be used for DS site. +- [x] Create list of site features, rank by MoSCoW (move to new issue) + +### Resources/Instructions +[Comparative Analysis Document](https://docs.google.com/spreadsheets/d/138Oul1HQLNuuGvIf0P894cmyo4A2Nf2ZSJtZoZTd26U/edit#gid=1259306930) +[Example from Access the Data Team](https://github.com/hackforla/access-the-data/issues/8)" From 2ba5e53a1409b9c1236f4eb4a037b4b9f221403c Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 22 Jul 2024 17:01:59 -0400 Subject: [PATCH 16/49] Update issues-to-csv removed fields.yml --- .github/workflows/issues-to-csv.yml | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml index 673fae8..a02c1a7 100644 --- a/.github/workflows/issues-to-csv.yml +++ b/.github/workflows/issues-to-csv.yml @@ -30,15 +30,23 @@ jobs: # curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ # "https://api.github.com/repos/${{ github.repository }}/issues?state=all&per_page=100" | \ # jq -r '.[] | select(.pull_request == null) | [.number, .title, (.labels | map(.name) | join(","))] | @csv' >> issues.csv - - # Generate issues CSV - name: Generate issues CSV run: | - echo "Issue Number,Title,Labels,State,Assignee,Milestone,Comments,Created At,Updated At,Closed At,Author Association,Body" > issues.csv + echo "Issue Number,Title,Labels,State,User Login,User ID,Assignee Login,Assignee ID" > issues.csv curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ "https://api.github.com/repos/${{ github.repository }}/issues?state=all&per_page=100" | \ - jq -r '.[] | select(.pull_request == null) | [.number, .title, (.labels | map(.name) | join(",")), .state, (.assignee | if . 
!= null then .login else "None" end), (.milestone | if . != null then .title else "None" end), .comments, .created_at, .updated_at, (.closed_at // "None"), .author_association, .body] | @csv' >> issues.csv - #force add file to repo, commit and push + jq -r '.[] | select(.pull_request == null) | [ + .number, + .title, + (.labels | map(.name) | join(",")), + .state, + .user.login, + .user.id, + (.assignee | if . != null then .login else "None" end), + (.assignee | if . != null then .id else "None" end) + ] | @csv' >> issues.csv + # Generate issues CSV + - name: Commit and push run: | git config user.name "Automated" From 0bd573fe3be7d4f71eb7905b94622906e33806f0 Mon Sep 17 00:00:00 2001 From: Automated Date: Mon, 22 Jul 2024 21:02:13 +0000 Subject: [PATCH 17/49] Latest data: Mon Jul 22 21:02:13 UTC 2024 --- issues.csv | 1183 +++------------------------------------------------- 1 file changed, 51 insertions(+), 1132 deletions(-) diff --git a/issues.csv b/issues.csv index 4e20a58..812d233 100644 --- a/issues.csv +++ b/issues.csv @@ -1,1132 +1,51 @@ -Issue Number,Title,Labels,State,Assignee,Milestone,Comments,Created At,Updated At,Closed At,Author Association,Body -204,"Recruit volunteers for team open roles","role: missing,epic,ready for product,size: 0.25pt,feature: recruiting","open","None","None",2,"2024-06-14T19:15:48Z","2024-06-22T03:14:32Z","None","CONTRIBUTOR","### Dependency -- [ ] need to recruit new team members to Data Science projects team - -### Overview -We need volunteer(s) to work on the open issues we have so that we can move the project forward. - -### Action Items -#### Creating a new role post -- [ ] Copy the template post from the Community of Practice WIKI, Open Roles Templates (Resource 1.01) -- [ ] Paste the template in a comment below -- [ ] Customize the comment to reflect your open role -- [ ] Open a new issue by using the Open Role template (Resource 1.02) -- [ ] Copy and Paste your customized comment to the new issue -- [ ] Add the label for the role that you are recruiting for -- [ ] Submit the new issue -- [ ] Assign yourself to the new issue -- [ ] Add the issue to the HfLA: Open Roles project board (Resource 1.03) -- [ ] Add the link of the new issue to the related comment on this issue -- [ ] Keep yourself assigned to this issue until the role is filled - -#### Marketing the new role post -- Post a link to the new issue in the Slack channel for the related Practice Area - - [ ] Copy the Slack message (Resource 1.04) - - [ ] Copy the message into the Slack Channel for the relevant Community of Practice (The Slack channels can be found on Resource 1.01) - - [ ] Replace the text `[Replace with LINK TO NEW ISSUE]` with link to the issue - - [ ] Check your team Slack channel for people reaching out after every onboarding and at least every other day - -#### Once Filled -- [ ] Hide the comment on this issue for the role you just filled -- [ ] Add the following comment to the role posting issue - ``` - This role has been filled by @[Replace with GITHUB HANDLE OF PERSON WHO IS TAKING ROLE] - ``` -- [ ] Close the role posting issue -- [ ] Unassign yourself from this issue -- If there are no open roles on this issue (i.e., if there are open roles, there will be unclosed comments) - - [ ] Add the label: Dependency and uncheck the dependency on top of this issue - - [ ] Move this issue to the icebox, on the project board - -### Resources/Instructions -- 1.01 [Community of Practice WIKI, Open Roles Templates, Secondary 
Issue](https://github.com/hackforla/communities-of-practice/wiki/Open-Roles-Templates#secondary-issue) -- 1.02 [Open Role Template: see Secondary Issue Draft](https://github.com/hackforla/communities-of-practice/wiki/Open-Roles-Templates) -- 1.03 [HfLA: Open Roles board](https://github.com/orgs/hackforla/projects/67) -- 1.04 Copy to post in CoP Slack Channels when there is an open role - ``` - Hi, the Data Science CoP has a volunteer posting, which includes details of how to get in touch with us if you are interested. [Replace with LINK TO NEW ISSUE] - ```" -203,"Prep project boards for Migration","role: product","open","akhaleghi","None",36,"2024-06-10T17:44:29Z","2024-06-24T20:58:57Z","None","CONTRIBUTOR","### Overview -We need to prepare the Project Boards for the forced migration that GitHub is doing in August, so that it is already working well for the team by then. - -### Action Items -- [x] copy this template to a comment below - ```` - ### Text from project board card - ``` - [INSERT MARKDOWN HERE] - ``` - - [ ] Determine where this item goes on the wiki (page and section) - - [ ] copy item to wiki - - [ ] hide this comment when completed. - ```` -- [ ] copy markdown from a card in the ""Start Here"" column of the Project Board into your new comment where it says `[INSERT MARKDOWN HERE] - -#### All issues are on the project board - - [ ] Check to see if all issues are on the project board (resource 1.02) - - [ ] If not, add them to the board, and put them in the correct columns - - [ ] Categorize (can be done later) - -### Resources/Instructions -Project board URLs: -[New board](https://github.com/orgs/hackforla/projects/65) -[Old board](https://github.com/hackforla/data-science/projects/1) -" -202,"Information for New and Existing Members","CoP: Data Science","open","None","None",0,"2024-06-10T17:31:57Z","2024-06-24T21:01:49Z","None","CONTRIBUTOR","**Meeting Times** -Every Monday at 7:00PM Pacific. No meetings on the first Monday of the month. -[Zoom Link](https://us06web.zoom.us/j/87038212377?pwd=bnh3bkJGeWJ0eS94YTRzaVFhZ2Nwdz09) - -**Important Links** -[Google Drive](https://drive.google.com/drive/folders/1maraSDu5BXzCYWFubY834Nq-8AT93FUJ?usp=sharing) -[Data Science Team Roster](https://docs.google.com/spreadsheets/d/1QJltNh1gOybfebe-RkT-xS7m4OtxbuFfaJ4OujeA4h0/edit#gid=0) -Please contact Abe Khaleghi on Slack if you require access to the above sites. - -**For Letters Confirming Hack for LA Participation** -Please see: https://github.com/hackforla/governance/issues/61 - -**Handy Tools and Sites** -[Full Page Screenshot Chrome ext](https://chrome.google.com/webstore/detail/gofullpage-full-page-scre/fdpohaocaechififmbbbbbknoalclacl?hl=en) -[311 Data API Access](https://dev-api.311-data.org/docs) -" -200,"EPA Data Set","","open","salice","None",2,"2024-04-30T02:37:50Z","2024-07-01T17:04:25Z","None","CONTRIBUTOR","### Overview -REPLACE THIS TEXT -Text here that clearly states the purpose of this issue in 2 sentences or less. 
-
-### Action Items
-- [ ] Add all data sources to Resources section below
-  - [ ] EDA Tasks
-    - [ ] Combine data from years into one data set and see differences
-    - [ ] Data Dictionary
-    - [ ] Data Cleaning
-- [ ] Write one-sheet
-  - [ ] Define stakeholder (Access the data and 311 teams used for educational purposes)
-  - [ ] Summarize project including value add
-  - [ ] Define project 6 month roadmap
-  - [ ] Detail history (if any)
-- [ ] Define tools to be used to visualize combined data
-
-Additional tasks TBD
-
-### Resources/Instructions
-- [Data source](https://www.epa.gov/outdoor-air-quality-data/download-daily-data)
-- Is there a link to an API to access the data?
-"
-198,"Update the About us page on wiki","role: product,feature: onboarding,project duration: one time,ready for product,project: Data Science CoP Maintenance,size: 0.25pt","open","max1million101","time sensitive",0,"2024-03-29T19:21:52Z","2024-07-04T20:25:37Z","None","MEMBER","### Overview
-We need to have contact information and an overview of who is running the Data Science Community of Practice
-
-### Action Items
-- [x] Change ""Mentors"" to be Current Mentors
-- [ ] Move Ryan Swan to a section of page, with the following header
-  ```
-  ### Former Mentors
-  ```
-- [ ] Update product manager to list Abe's name and bio
-- [ ] Update product manager section to add Tania Khan to the page
-
-### Resources/Instructions
-https://github.com/hackforla/data-science/wiki/About-us"
-194,"Create data dictionary (EDA task)","feature: missing,role: missing,size: missing,project: missing","open","None","None",1,"2024-02-18T06:36:30Z","2024-03-29T19:35:15Z","None","MEMBER",
-193,"DRAFT: Access to ""Third Spaces""","feature: missing,role: missing,size: missing,project: missing","open","None","None",0,"2024-02-12T22:46:40Z","2024-03-29T19:35:14Z","None","CONTRIBUTOR","### Overview
-Project idea from Karina to examine accessibility to green spaces (parks, public beaches, etc.) within the City of LA. Ideas include:
- - Combining metro/crime (e.g., vehicle crash data) data sets to see how accessible (transportation-wise) these green spaces are for the broad population.
- - Examining population density info to get a sense of the size of parks x population it's serving in a mile-radius. Analysis can be helpful in many ways (deciding where to hold public events, determining underserved areas for new green spaces, etc.)
-
-### Data Sources Starting Point
-Some data sources to start exploration can be found here:
-- [LA Metro dataset](https://developer.metro.net/gis-data/)
-- [LA City Parks](https://www.google.com/url?sa=t&rct=j&q=&esrc=s&source=web&cd=&ved=2ahUKEwik1LT4oKeEAxUtMUQIHeT5DJMQFnoECBUQAQ&url=https%3A%2F%2Fdata.lacity.org%2FParks-Recreation%2FDepartment-of-Recreation-and-Parks-GIS-Map-of-Park%2Fnuub-r4zx&usg=AOvVaw0ixMbMAc7m57sctXQOpKht&opi=89978449)
-- [LA census data](https://data.census.gov/table?g=050XX00US06037)
-
-_Additional Data sources may be used for this project._
-
-### Next steps
-- [ ] Decide if this should be one issue or multiple.
-- [ ] Decide on what additional analyses we can do
-- [ ] Complete Project Proposal outlining: Objective, Expected Deliverable(s), Data Sources & Limitations, Research outline. Ideally, the proposal should outline possible stakeholder(s) (e.g., LA resident, city council) of project deliverables.
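-
-For the accessibility analysis itself, one possible shape of the computation, sketched with geopandas (the layer file names and the one-mile buffer are assumptions, not decisions):
-```python
-import geopandas as gpd
-
-# Hypothetical inputs: parks and census tracts exported from the sources above
-parks = gpd.read_file('parks.shp').to_crs(epsg=2229)    # LA-area state plane CRS (US feet)
-tracts = gpd.read_file('tracts.shp').to_crs(epsg=2229)
-
-# Which tracts fall within a 1-mile radius of each park? (1 mile = 5280 ft)
-buffers = parks[['geometry']].copy()
-buffers['geometry'] = buffers.geometry.buffer(5280)
-served = gpd.sjoin(tracts, buffers, predicate='intersects')
-print(served.groupby('index_right').size())  # tract count per park buffer
-```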
-
-### Project may involve:
-Python, GIS data, data analytics, dashboard-building, data modeling
-
-
-"
-191,"DRAFT: MediaWiki NLP Project","feature: missing,role: missing,size: missing,project: missing","open","salice","None",1,"2024-01-09T03:29:17Z","2024-04-12T19:24:57Z","None","CONTRIBUTOR","### Overview
-We want to generate ideas to prepare this issue for our CoP
-
-### Action Items
-- [ ] Review the MediaWiki API site [here](https://www.mediawiki.org/wiki/API:Main_page)
-
-### Resources/Instructions
-
-"
-190,"Create Base64 and Env File Tutorial","feature: missing,role: missing,size: missing,project: missing","open","None","None",0,"2023-12-01T16:26:17Z","2024-03-29T19:35:13Z","None","MEMBER","### Overview
-
-Write a tutorial on base64 and the usage of .env files, based on experience using them when completing the automation dashboard project for the Hack for LA website team. This would be useful for a data scientist/analyst who needs to upload their Python script to a GitHub repository without compromising the security of confidential API keys and other (privacy) information.
-
-### Action Items
-- [x] Create an introduction that gives context as to the purpose/use of the tutorial and connects the 2 concepts based on commonality
-- [x] Create full walkthroughs of the process to use both
-- [x] Search for useful resources to complement the tutorial
-- [x] Add screenshots and resize them to appropriate sizes
-- [x] Proofread for misleading language
-- [x] Message Sophia and Karina to get the tutorial approved
-
-### Resources/Instructions
-All resources have been included in the body of the tutorial
-
-Wiki Tutorial Created: [Introduction to Keeping Confidential Information Safe on GitHub: GitHub secrets and .env files](https://github.com/hackforla/data-science/wiki/Introduction-to-Keeping-Confidential-Information-Safe-on-GitHub:-GitHub-secrets-and-.env-files)
-
-"
-189,"Refining the Requirements for Skills Match Dashboard","role: missing,size: missing,project: missing,feature: skills / productivity dashboard","open","n2020h","None",6,"2023-11-21T03:52:00Z","2024-04-15T21:06:37Z","None","MEMBER","### Overview
-We need to gather relevant prior documentation of the features and functions of the skills dashboard and refine the requirements, so that we can develop it.
- -### Action Items - -- [ ] Bonnie will add links to prior known issues from other projects -- [ ] - -### Resources/Instructions -https://docs.google.com/spreadsheets/d/1SV0j3NKcjOpHuT3okyM36xwWA966hNA4N1zmqRtd8B8/edit#gid=11574199 -" -183,"MERL Center Data Cleaning","role: missing,project duration: one time,size: 1pt,project: MC Southern Africa NGOs' OS usage","closed","None","None",8,"2023-10-11T18:00:56Z","2024-01-16T20:39:32Z","2024-01-16T20:39:25Z","MEMBER","### Overview -We need data collected through KoboToolbox to be analyzed and then visualized so that the findings can be incorporated in an article for the MERL Center -### Action Items -- [ ] Clean data -- [ ] Provide analysis -- [ ] Create visualization - -### Resources/Instructions -- [MERL Center Folder on Data Science CoP Drive](https://drive.google.com/drive/folders/1gXWyLtwfl697hCVMS-D1tC0djPf3woV5?usp=drive_link) -- [Survey Data](https://docs.google.com/spreadsheets/d/15zLn9NBXkadAsA8SjuXwGHjGaaJJQUcy05V9Ksm0oh0/edit?usp=drive_link) - - First Tab: Survey data with PII removed - - Second Tab: Bonnie's demonstration for data science community (wrapped column headers, highlighted related columns with colors, starting at GN through HG) - - Please note: column K has duplicate numbers to indicate multiple people from the same organization (meaningful because in column L, different people from the same organization indicated different category) -- [Survey Form](https://ee.kobotoolbox.org/x/and3RucH)" -182,"CoP: Data Science: Active and Inactive Businesses of LA County","role: data science,epic,size: 3pt,project: EDA","open","rahul897","None",22,"2023-08-30T18:11:02Z","2024-06-25T02:13:03Z","None","CONTRIBUTOR","### Prerequisite(s) -If you would like to work on this issue, please add a comment below and include the following information: -- Your name -- How many hours you can commit to working on this in the next week (minimum of 2) -- Commit to providing an update with a comment before the next community of practice meeting - -For example: -- John Doe -- I can commit to working on this issue 3 hours in the following week. -- Yes, I will provide an update on my progress with a comment below. - -Once you have done this, please add yourself to the “Assignees” section on the right and update the issue weekly to document your progress. - -### Overview -We want to create a usable dataset of active and inactive businesses to perform various time series analyses (i.e. visualizing business closures during the covid pandemic). - -### Action Items -Phase 1 -- [ ] Find available data sources and add to Resources section -- [ ] Create data dictionary (EDA task) -- [ ] Create issues required to fulfill project requirements, including exploratory data analysis, required tasks, and deliverables - - [ ] Perform data cleaning (EDA task) - - [ ] Understand and outline data context -- [ ] Write one-sheet (see Resources below) - - [ ] Define stakeholder - - [ ] Summarize project, including value add - - [ ] Define project 6 month roadmap - - [ ] Detail history (if any) - -### Resources/Instructions -[Data source](https://data.lacity.org/Administration-Finance/Listing-of-All-Businesses/r4uk-afju) for business listings in LA County. 
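-
-As an illustration of the kind of time series analysis envisioned, a minimal pandas sketch against the listing CSV (the file and column names are assumptions to verify against the actual export):
-```python
-import pandas as pd
-
-# Column name is an assumption; check the data dictionary of the export
-df = pd.read_csv('Listing_of_All_Businesses.csv')
-df['end'] = pd.to_datetime(df['LOCATION END DATE'], errors='coerce')
-closed = df.dropna(subset=['end'])
-closures_by_month = closed.set_index('end').resample('MS').size()
-print(closures_by_month.loc['2020'])  # monthly closure counts during 2020
-```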
-" -181,"CoP: Data Science: Affordable Housing Access to Food Pantries","role: data science,epic,size: 3pt,project: EDA","closed","MDivyaPrakash","None",10,"2023-08-30T18:00:11Z","2024-06-26T07:08:36Z","2024-06-26T07:08:36Z","CONTRIBUTOR","### Prerequisite(s) -If you would like to work on this issue, please add a comment below and include the following information: -- Your name -- How many hours you can commit to working on this in the next week (minimum of 2) -- Commit to providing an update with a comment before the next community of practice meeting - -For example: -- John Doe -- I can commit to working on this issue 3 hours in the following week. -- Yes, I will provide an update on my progress with a comment below. - -Once you have done this, please add yourself to the “Assignees” section on the right and update the issue weekly to document your progress. - -### Overview -We want to analyze to what extent affordable housing residents have access to food pantries and meal programs. To do this, we want to examine the geographic location of these housing projects in relation to their accessibility to pantries and programs. - -### Action Items -Phase 1 -- [x] Find available data sources and add to Resources section, particularly the Food Oasis data. -- [x] Determine is this is one-time or ongoing project (and assign appropriate label) -- [x] Write one-sheet (see Resources below) - - [x] Define stakeholder - - [x] Summarize project, including value add - - [x] Define project 6 month roadmap - - [x] Detail history (if any) -- [x] Define tools to be used for analysis and visualization (if applicable) -- [x] Create issues required to fulfill project requirements, including exploratory data analysis, required tasks, and deliverables - - [x] Create data dictionary (EDA task) - - [x] Perform data cleaning (EDA task) - - [x] Understand and outline data context - -### Resources/Instructions (Updated) -- [Data source](https://data.lacity.org/Housing-and-Real-Estate/LAHD-Affordable-Housing-Projects-List-2003-to-Pres/mymu-zi3s) for affordable housing locations (updated monthly). -- [Visualization](https://data.lacity.org/Housing-and-Real-Estate/LAHD-Affordable-Housing-Projects-List-2003-to-Pres/psx4-6drg) of affordable housing locations. -- [Food Oasis Wiki](https://github.com/hackforla/food-oasis/wiki) -- [LA Food Bank](https://www.lafoodbank.org/find-food/pantry-locator/) -- [LA County Food Distribution Sources and Programs](http://publichealth.lacounty.gov/nut/food-distribution-resources.htm) -- [Chirp LA](https://www.chirpla.org/sites/chirpla.org/files/u532/Food%20Bank%20Guide%202021_4.pdf) PDF containing active food banks in LA -" -180,"City of Los Angeles Arrests","role: data science,epic,size: 3pt,project: EDA","open","dolla24","None",11,"2023-08-23T01:36:50Z","2024-03-19T01:54:19Z","None","CONTRIBUTOR","### Overview -We want to analyze arrest data for the city of Los Angeles, and incorporate data from other sources, to determine its suitability for further analysis by Hack for LA. More information about the data, from the [Controller's office website](): -- The LAPD arrest types fall into five categories identified by the LAPD: (1) felony, (2) misdemeanor, (3) infractions, (4) dependent, and (5) other. -- Under California law, a felony is a crime that is punishable with death, by imprisonment in the state prison, -or . . . by imprisonment in a county jail under [certain provisions]. Every other crime or public offense is a misdemeanor except those offenses that are classified as infractions. 
Misdemeanors and infractions carry varying degrees of financial and incarceration consequences. -- LAPD makes more arrests for misdemeanor and infraction offenses than for felonies. - - 2019: 55,954 misdemeanor & infraction arrests vs. 33,663 felony arrests - - 2020: 34,659 misdemeanor & infraction arrests vs. 31,015 felony arrests - - 2021: 33,179 misdemeanor & infraction arrests vs. 32,597 felony arrests - - 2022: 30,431 misdemeanor & infraction arrests vs. 30,378 felony arrests - -### Action Items -Phase 1 -- [x] Find available data sources and add to Resources section -- [x] #194 -- [ ] Determine is this is one-time or ongoing project (and assign appropriate label) -- [ ] Write one-sheet (see Resources below) - - [ ] Define stakeholder - - [ ] Summarize project, including value add - - [ ] Define project 6 month roadmap - - [ ] Detail history (if any) -- [x] Define tools to be used for analysis and visualization (if applicable) -- [ ] Create issues required to fulfill project requirements, including exploratory data analysis, required tasks, and deliverables - - [ ] Perform data cleaning (EDA task) - - [ ] Understand and outline data context - -### Resources/Instructions -[Arrest data for 2022 csv file](https://drive.google.com/drive/folders/1QSOWZxUZWQzEsERrtoI3SiFsAUZruZzk) -[Arrest data from the City of Los Angeles](https://www.kaggle.com/datasets/cityofLA/los-angeles-crime-arrest-data -) -Check #178 for updates on whether a real time source for this data have been found -" -179,"CoP: Data Science: City of Los Angeles Evictions","role: data science,epic,size: 3pt,project: EDA","closed","rahul897","None",14,"2023-08-23T01:29:03Z","2024-07-09T21:45:53Z","2024-07-09T21:45:53Z","CONTRIBUTOR","### Prerequisite(s) -If you would like to work on this issue, please add a comment below and include the following information: -- Your name -- How many hours you can commit to working on this in the next week (minimum of 2) -- Commit to providing an update with a comment before the next community of practice meeting - -For example: -- John Doe -- I can commit to working on this issue 3 hours in the following week. -- Yes, I will provide an update on my progress with a comment below. - -Once you have done this, please add yourself to the “Assignees” section on the right and update the issue weekly to document your progress. - -### Overview -We want to analyze eviction data for the city of Los Angeles, and incorporate data from other sources, to determine whether there are actions local leaders can take to address the problem. The following background information is from the [LA Controller's website](https://controller.lacity.gov/landings/evictions): -- August 1, 2023 – rent owed from March 1, 2020 to August 31, 2020 is due. If the Declaration of COVID-19-Related Financial Distress form was returned to the landlord within 15 days of rent being due, they cannot be evicted for nonpayment of rent. -- February 1, 2024 – rent owed from October 1, 2021 to January 31, 2023 is due. If a tenant returned the Declaration of COVID-19-Related Financial Distress form to the landlord within 15 days of rent being due AND paid 25% of rent owed from this period, they cannot be evicted for nonpayment of rent. -- However, since March 27, 2023, landlords may not evict a tenant who falls behind in rent unless the tenant owes an amount higher than the Fair Market Rent (FMR). The FMR depends on the bedroom size of the rental unit. 
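-
-For the EDA tasks below, a minimal load-and-profile sketch for the CSV linked under Resources (the file name and zip-code column are assumptions to verify against the actual export):
-```python
-import pandas as pd
-
-ev = pd.read_csv('evictions_2023.csv')  # assumed name for the Drive export
-print(ev.shape)
-print(ev.dtypes)
-if 'Zip' in ev.columns:  # hypothetical column; check the data dictionary
-    print(ev['Zip'].value_counts().head(10))
-```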
-
-### Action Items
-Phase 1
-- [ ] Find available data sources and add to Resources section
-- [ ] Perform Exploratory Data Analysis (read more [here](https://www.analyticsvidhya.com/blog/2021/08/how-to-perform-exploratory-data-analysis-a-guide-for-beginners/))
-  - [ ] Create data dictionary (EDA task)
-  - [ ] Perform data cleaning (EDA task)
-  - [ ] Understand and outline data context
-- [ ] Determine if this is a one-time or ongoing project (and assign appropriate label)
-- [ ] Write one-sheet (see Resources below)
-  - [ ] Define stakeholder
-  - [ ] Summarize project, including value add
-  - [ ] Define project 6 month roadmap
-  - [ ] Detail history (if any)
-- [ ] Define tools to be used for analysis and visualization (if applicable)
-- [ ] Create issues required to fulfill project requirements, including exploratory data analysis, required tasks, and deliverables
-
-
-### Resources/Instructions
-[Feb 2023 - July 2023 eviction data csv file](https://drive.google.com/drive/folders/1uyPtg1MNX5LIDwQkFtErmIQJNe9N7X25)
-Check #178 for updates on whether a real time source for this data has been found
-"
-178,"CoP: Data Science: Find data sources through the LA Controller's office","documentation,good first issue,role: data analysis,size: 1pt,project: missing","open","FragariaChemist","None",4,"2023-08-23T01:14:06Z","2024-07-09T21:50:07Z","None","CONTRIBUTOR","### Prerequisite(s)
-Skills: Online research
-
-If you would like to work on this issue, please add a comment below and include the following information:
-- Your name
-- How many hours you can commit to working on this in the next week (minimum of 2)
-- Commit to providing an update with a comment before the next community of practice meeting
-
-For example:
-- John Doe
-- I can commit to working on this issue 3 hours in the following week.
-- Yes, I will provide an update on my progress with a comment below.
-
-Once you have done this, please add yourself to the “Assignees” section on the right and update the issue weekly to document your progress.
-
-### Overview
-We want to build a library of various local data sources so that we can initiate new data science projects for volunteers that will be useful for the local community. The LA Controller's office seems to have quite a few.
-
-### Action Items
-- [ ] Visit the LA City Controller's website (link below in resources) and create a list of data sets available.
-- [ ] Find contact information from the Controller's website and get in touch with someone at the office to determine if real time data is available for the public to access. Examples are eviction and LAPD arrest data, which we have historical data for (links below).
-- [ ] Document any correspondence with city representatives in a comment below.
-- [ ] If real time data is available, add links to the Resources section.
-
-### Resources
-[LA City Controller Website](https://controller.lacity.gov/data) - In the ""Data Sites"" section there are various resources that should be looked at to determine which may be of interest to Hack for LA
-[Google Doc of 2022 LAPD Arrests](https://docs.google.com/spreadsheets/d/1rH272RxRxU8ut3xArEnyOz2j_Qlw5ntEoTJlZwcx_Ng/edit#gid=1936642723) - We should see if this arrest data is available in real time.
-[Google Doc of 2023 Eviction Data](https://docs.google.com/spreadsheets/d/1d6xgd95m0v914YTePb8m-9Ir9SVKNKx4-Xw8N_4I0pk/edit#gid=1331260170) - Another data set to check if real time data is available.
-" -177,"Create 311 data CSV files that can be accessed through a Jupyter notebook","role: data science,project: 311 Data,size: 3pt,feature: project management","closed","mru-hub","None",3,"2023-03-31T20:57:06Z","2024-06-16T06:27:37Z","2024-04-16T02:32:54Z","CONTRIBUTOR","### Overview -We want to download 311 data and split by year, then month, so each is under 100MB and we can host tan append-only data warehouse on GitHub. - -### Action Items -- [x] Get cleaning rules from the 311-data repo and add a link to the rules to Resources below. -- [x] Get city data -- [x] Split by year, then by month -- [ ] Outline what you did to clean the data in a comment below -- [ ] Create Jupyter notebook to access the data and add notes explaining the cleaning rules -- [ ] Create a website (ideally ghpages) that can display the jupyter notebook so that people don't have to know how to download and install one. - -### Resources/Instructions - -**Cleaning Rules**: https://github.com/hackforla/data-science/blob/main/311-data/CSV_files/Docs/CleaningRules.txt -**City Data:**: https://data.lacity.org/browse?q=311%20data%20%2C%202024&sortBy=relevance (Please update the filter for the year 2024 based on the requirements.) - - - - - - -) - -" -170,"California Grants Portal Data Set EDA","role: data analysis,project duration: ongoing,size: 1pt,project: EDA","closed","jossus657","None",9,"2022-07-15T17:08:16Z","2022-11-08T21:32:03Z","2022-11-08T21:32:03Z","CONTRIBUTOR","### Overview -We want to analyze the California Grants Portal Dataset to determine the feasibility of building a tool for public use. - -### Action Items -- [x] Perform exploratory data analysis [EDA] on the Grants Portal dataset. - - [x] See EDA section of tutorial [here](https://github.com/hackforla/data-science/wiki/Intro-to-Data-Analysis-With-Python) -- [x] Attach findings (write-ups, notebooks, etc.) to this issue. - -### Resources/Instructions -[California Grants Portal Website](https://data.ca.gov/dataset/california-grants-portal) -[Data Analysis tutorial with EDA instructions](https://github.com/hackforla/data-science/wiki/Intro-to-Data-Analysis-With-Python) -" -169,"Potential Projects: California Grants Portal","role: data science,epic,size: epic,dependency,project: EDA","closed","mihikasahani","None",7,"2022-07-15T16:58:19Z","2024-02-13T03:48:56Z","2024-02-13T03:48:51Z","CONTRIBUTOR","### Dependency -- [x] EDA on a data set in progress - -### Overview -We want to explore this data set to determine if it is something we can build a public resource for. 
-
-### Action Items
-- [x] Find available data sources and add to Resources section
-- [ ] Determine if this is a one-time or ongoing project (and assign appropriate label)
-- [ ] Write one-sheet (see Resources below)
-  - [ ] Define stakeholder
-  - [ ] Summarize project, including value add
-  - [ ] Define project 6 month roadmap
-  - [ ] Detail history (if any)
-- [ ] Define tools to be used for analysis and visualization (if applicable)
-- [ ] Create issues required to fulfill project requirements, including exploratory data analysis, required tasks, and deliverables
-  - [ ] Create data dictionary (EDA task)
-  - [ ] Perform data cleaning (EDA task)
-  - [ ] Understand and outline data context
-
-### Resources/Instructions
-
-- [California Grants Portal Website](https://data.ca.gov/dataset/california-grants-portal)
-- EDA: #170
-- [How to write a onesheet for your project](https://docs.google.com/document/d/14-nYNCjrMrC8lucUNBfLOFQZk2sWbkRlJgigWDAlqlI/edit#heading=h.z4tpbshl55b9)"
-163,"CoP: Data Science: Complete Intro to Command Line and Git Tutorial","feature: guide,role: data science,size: 1pt,role: Data Engineer","open","None","None",0,"2022-06-03T18:26:54Z","2024-06-18T21:57:35Z","None","CONTRIBUTOR","### Overview
-We want to move an existing Hack for LA tutorial to our repository
-
-### Action Items
-- [ ] Look through the repository to see what content needs to be moved to the tutorials page
-- [ ] Add content to tutorial
-
-### Resources/Instructions
-[Original repository](https://github.com/hackforla/intro-cli-git-github)
-[Data Science Command Line and Git Tutorial Page](https://github.com/hackforla/data-science/wiki/Introduction-to-Commandline,-Git-and-Github)
-"
-162,"CoP: Data Science: Complete Intro to Python Tutorial","feature: guide,role: data science,size: 1pt,draft,ready for product","open","None","None",2,"2022-06-03T18:23:12Z","2024-06-18T21:57:38Z","None","CONTRIBUTOR","### Overview
-We want to move an existing Hack for LA tutorial to our repository
-
-### Action Items
-- [ ] Fork the repository to Hack for LA
-- [ ] Look through the repository and our tutorial page to see what content needs to be added
-- [ ] Fork the repo to your GitHub, using hackforla's version as the upstream
-- [ ] Add content to tutorial
-- [ ] Commit it to our repo
-- [ ] Reach out to Nathan on this issue and find out if he wants us to do a PR against his repo.
-
-### Resources/Instructions
-- [Original repository](https://github.com/ndanielsen/intro-python)
-- [Data Science Python Tutorial Page](https://github.com/hackforla/data-science/wiki/Introduction-to-Programming-with-Python)
-"
-161,"Project Name: Lucky Parking","feature: missing,role: data analysis,size: 1pt,project: missing","closed","None","None",2,"2022-05-16T23:31:23Z","2022-07-08T19:37:14Z","2022-06-03T23:13:17Z","MEMBER","Volunteer Opportunity:"
-157,"CoP: Data Science: Create Deep Learning Tutorial","documentation,feature: guide,role: data science,size: 1pt","open","elliealbertson","None",1,"2022-04-01T19:26:34Z","2024-07-07T19:24:44Z","None","CONTRIBUTOR","### Overview
-Update the [Deep Learning](https://github.com/hackforla/data-science/wiki/Deep-Learning) page with resources and an article header.
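-
-For the introductory paragraph, the kind of minimal example the vetted resources should build toward, purely as an illustration (PyTorch, random data):
-```python
-import torch
-import torch.nn as nn
-
-# A one-layer model on random data, just to show the shape of a training loop
-model = nn.Linear(4, 1)
-opt = torch.optim.SGD(model.parameters(), lr=0.1)
-x, y = torch.randn(64, 4), torch.randn(64, 1)
-for _ in range(100):
-    opt.zero_grad()
-    loss = nn.functional.mse_loss(model(x), y)
-    loss.backward()
-    opt.step()
-print(loss.item())
-```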
- -### Action Items -- [ ] Create a Google Doc in the folder provided under resources - - [x] Create preliminary outline - - [ ] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft - - [ ] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA -- [ ] Review the draft with the Data Science CoP - - [ ] Make needed updates -- [ ] Add to the wiki page - -### Resources/Instructions -#### Wiki page -[Deep Learning Tutorial](https://github.com/hackforla/data-science/wiki/Deep-Learning) - -#### Location for any files you might need to upload (drafts, images, etc.) -- [Folder for files related to the Deep Learning tutorial](https://drive.google.com/drive/folders/1j_txrPW6uf6MjKTYqw_K1sIUtsZvbsLy) - - [DS: Deep Learning Tutorial Google Doc](https://docs.google.com/document/d/1YME-GyyStpl9QO3zKD-gatUgg1womzZjsdeYDaom71A/edit) - -#### Tools that are core that should be mentioned: -- PyTorch -- Keras/TensorFlow -- HuggingFace - -#### Examples of resources that would be useful to include: -- Web how-to/tutorial/walk-throughs -- Youtube playlists or videos demonstrating tools -- Links to blogs or platforms with subject matter experts -" -156,"CoP: Data Science: Create Machine Learning Tutorial","documentation,feature: guide,role: data science,size: 1pt","open","BhavanaSai12","None",0,"2022-04-01T19:24:06Z","2024-06-25T03:48:00Z","None","CONTRIBUTOR","### Overview -Update the [Machine Learning](https://github.com/hackforla/data-science/wiki/Machine-Learning) page with resources and an article header. - -### Action Items -- [x] Create a Google Doc in the folder provided under resources - - [x] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA - - [x] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft -- [ ] Review the draft with the Data Science CoP -- [ ] Add to the wiki page - -### Resources/Instructions -#### Wiki page -[Machine Learning Tutorial](https://github.com/hackforla/data-science/wiki/Machine-Learning) - -#### Location for any files you might need to upload (drafts, images, etc.) -- [Folder for files related to the Machine Learning tutorial](https://drive.google.com/drive/folders/1Z7ei04hVHBFVCN484_yoqAgQFz8XP3Rz) - - [DS: Machine Learning Tutorial Google Doc](https://docs.google.com/document/d/1MVabfo0fhJQ3-9wQlDn9NLnV0ZvNzP3Fu014Guj_deA/edit) - -#### Tools that are core that should be mentioned: -- XGBoost -- Random Forest - -#### Examples of resources that would be useful to include: -- Web how-to/tutorial/walk-throughs -- Youtube playlists or videos demonstrating tools -- Links to blogs or platforms with subject matter experts - -" -155,"CoP: Data Science: Create Stats Tutorial","documentation,feature: guide,role: data science,role: data analysis,size: 1pt","open","None","None",0,"2022-04-01T19:21:26Z","2024-06-18T21:57:20Z","None","CONTRIBUTOR","### Overview -Update the [Stats](https://github.com/hackforla/data-science/wiki/Almighty-Statistics) page with resources and an article header. 
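-
-For the introductory paragraph, an example of how small the significance-testing entry point can be (SciPy, simulated data):
-```python
-import numpy as np
-from scipy import stats
-
-# Two simulated samples with slightly different means, then a two-sample t-test
-rng = np.random.default_rng(0)
-a, b = rng.normal(0, 1, 200), rng.normal(0.3, 1, 200)
-t, p = stats.ttest_ind(a, b)
-print(f't={t:.2f}, p={p:.4f}')
-```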
- -### Action Items -- [ ] Create a Google Doc in the folder provided under resources - - [ ] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA - - [ ] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft -- [ ] Review the draft with the Data Science CoP -- [ ] Add to the wiki page - -### Resources/Instructions -#### Wiki page -[Stats Tutorial](https://github.com/hackforla/data-science/wiki/Almighty-Statistics) - -#### Location for any files you might need to upload (drafts, images, etc.) -- [Folder for files related to the Stats tutorial](https://drive.google.com/drive/folders/1MLPi4eo6BaurUDh-fIBW85ZKPE3jDERu) - - [DS: Stats Tutorial Google Doc](https://docs.google.com/document/d/1FMbaMeVRcVQPiIGW3QvO1ah8ocptNFwLiQjTZ-RUhsU/edit) - -#### Skills that are core that should be mentioned: -- Logistic/Linear Regression -- Experimental Design -- Significance Testing -- Bayesian Analysis - -#### Examples of resources that would be useful to include: -- Web how-to/tutorial/walk-throughs -- Youtube playlists or videos demonstrating tools -- Links to blogs or platforms with subject matter experts - -" -154,"CoP: Data Science: Create Data Ops Tutorial","documentation,feature: guide,size: 1pt,role: Data Engineer","open","None","None",1,"2022-04-01T19:18:14Z","2024-06-18T21:57:17Z","None","CONTRIBUTOR","### Overview -Update the [Data Ops](https://github.com/hackforla/data-science/wiki/Data-Ops) page with resources and an article header. - -### Action Items -- [ ] Create a Google Doc in the folder provided under resources - - [ ] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA - - [ ] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft -- [ ] Review the draft with the Data Science CoP -- [ ] Add to the wiki page - -### Resources/Instructions -#### Wiki page -[Data Ops Tutorial](https://github.com/hackforla/data-science/wiki/Data-Ops) - -#### Location for any files you might need to upload (drafts, images, etc.) -- [Folder for files related to the Data Ops tutorial](https://drive.google.com/drive/folders/1j5r7SDEHAUhUbI6DDD3MxvobPfU9WiH-) - - [DS: Data Ops Tutorial Google Doc](https://docs.google.com/document/d/15WdOrTtKGuZL3rPuQm3h4SEWuUL4vML0XtPas8fBIwc/edit) - -#### Tools that are core that should be mentioned: -- EC2 -- Lambda -- RDS -- Athena/Hive -- Flask - -#### Examples of resources that would be useful to include: -- Web how-to/tutorial/walk-throughs -- Youtube playlists or videos demonstrating tools -- Links to blogs or platforms with subject matter experts" -153,"CoP: Data Science: Create Text Analysis Tutorial","documentation,feature: guide,role: data analysis,size: 1pt","open","bfang22","None",3,"2022-04-01T19:15:05Z","2024-06-18T21:59:08Z","None","CONTRIBUTOR","### Overview -Update the [Text Analysis](https://github.com/hackforla/data-science/wiki/Text-Analysis) page with resources and an article header. 
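-
-For the introductory paragraph, a minimal taste of the core tools listed below (spaCy; assumes the small English model has been downloaded):
-```python
-import spacy
-
-nlp = spacy.load('en_core_web_sm')  # python -m spacy download en_core_web_sm
-doc = nlp('Hack for LA volunteers analyze 311 service requests across Los Angeles.')
-print([(ent.text, ent.label_) for ent in doc.ents])                     # named entities
-print([tok.lemma_ for tok in doc if tok.is_alpha and not tok.is_stop])  # content lemmas
-```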
- -### Action Items -- [ ] Create a Google Doc in the folder provided under resources - - [ ] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA - - [ ] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft -- [ ] Review the draft with the Data Science CoP -- [ ] Add to the wiki page - -### Resources/Instructions -#### Wiki page -[Text Analysis Tutorial](https://github.com/hackforla/data-science/wiki/Text-Analysis) - -#### Location for any files you might need to upload (drafts, images, etc.) -- [Folder for files related to the Text Analysis tutorial](https://drive.google.com/drive/folders/1HApzgkLXfsRgzCrIKWFI08m5NfkWxz9W) - - [DS: Text Analysis Tutorial Google Doc](https://docs.google.com/document/d/1-UTjQ_lonjEp1lZ0agjtEgP1sGYUSEvGgKij97qY7A8/edit) - -#### Tools that are core that should be mentioned: -- nltk -- SpaCy - -#### Examples of resources that would be useful to include: -- Web how-to/tutorial/walk-throughs -- Youtube playlists or videos demonstrating tools -- Links to blogs or platforms with subject matter experts -" -152,"CoP: Data Science: Open Source Projects Data Set for Climate projects","role: data analysis,size: 1pt,epic: GreenEarthOS,project: climate projects,draft","open","noneill256","None",12,"2022-03-18T21:04:03Z","2024-06-18T21:59:30Z","None","CONTRIBUTOR","### Overview -We want to monitor various open source projects and create a script that will keep our listing up-to-date with information such as last active. - -### Action Items -- [x] Add each project's URL to the Project URL column (You can see that if you hover over the Project name and description cells. You can also try to pull it from the Project URLs link in Resources below) -- [x] Create a script that will visit each URL and update the following columns (Note that you may have to work around -API limits): - - [x] Project URL - - [x] Last Active Date (Date) - - [x] Number of contributors - - [ ] Connect with volunteer who developed script to copy files over to Hack for LA repository. Code is [here](https://github.com/noneill256/Hack4LA-Project-Spreadsheet-Updater?tab=readme-ov-file). - - [ ] Assess current script and make necessary edits so it can be automated to run and produce output periodically. - - [ ] Develop a data dashboard to display information in a useful way - -### Resources/Instructions -Spreadsheet of open source projects we want to monitor [here](https://docs.google.com/spreadsheets/d/1LFResU_pcP5IMwz92dmPQRoKJ4lNa3tvr-_COJiE_hc/edit#gid=0) -Original source for project URLs [here](https://opensustain.tech/) -[Info about getting URL from a Google Sheet cell](https://www.oksheets.com/extract-hyperlink-url/#:~:text=Extract%20the%20URL%20from%20a%20Hyperlink%20by%20Edit%20and%20Copy&text=You%20can%20right%2Dclick%20on,option%20from%20the%20Insert%20menu.) -" -149,"Weekly Label Check","role: product,size: 1pt,feature: project management","open","None","None",17,"2022-02-25T21:38:09Z","2024-03-29T19:41:34Z","None","MEMBER","### Dependency -2022-02-24 - -### Overview -We need to perform a weekly check on issues to make sure they all conform to our label and milestone schema. 
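-
-The checks below rely on saved GitHub search links; for spot checks, the same counts can be pulled from the REST API (a sketch using only the standard library; unauthenticated calls are rate-limited):
-```python
-import json
-import urllib.parse
-import urllib.request
-
-def open_issues_with_label(label):
-    params = urllib.parse.urlencode({'labels': label, 'state': 'open', 'per_page': 100})
-    url = 'https://api.github.com/repos/hackforla/data-science/issues?' + params
-    with urllib.request.urlopen(url) as resp:
-        # the issues endpoint also returns PRs; they carry a pull_request key
-        return [item['number'] for item in json.load(resp) if 'pull_request' not in item]
-
-for label in ['feature: missing', 'project: missing', 'role: missing', 'size: missing']:
-    print(label, open_issues_with_label(label))
-```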
- -### Action Items -- [ ] Check to see if there are any issues that have not yet been added to [project board](https://github.com/hackforla/data-science/issues?q=is%3Aissue+is%3Aopen+no%3Aproject) - - [ ] if yes, add to project board -- [ ] Check the following to make sure that they are all 0/0 (Links for checking to make sure that all issues either have a ___ or a ____ missing label) - - [feature and project](https://github.com/hackforla/data-science/issues?q=-label%3A%22project%3A+health+services%22+-label%3A%22project%3A+metro+311+correlation%22+-label%3A%22project%3A+district+types%22+-label%3A%22project%3A+structured+context%22+-label%3A%22project%3A+311-data-dashboards%22+-label%3A%22project%3A+311+Data%22+-label%3A%22project%3A+missing%22+-label%3A%22project%3A+health+services%22+-label%3A%22project%3A+metro+311+correlation%22+-label%3A%22project%3A+structured+context%22+-label%3A%22project%3A+311-data-dashboards%22+-label%3A%22project%3A+311+Data%22+-label%3A%22project%3A+seie%22+-label%3A%22project%3A+OCS%22+-label%3A%22project%3A+missing%22+is%3Aissue+-label%3A%22feature%3A+project+management%22+-label%3A%22feature%3A+guide%22+-label%3A%22feature%3A+onboarding%22+-label%3A%22feature%3A+recruitment%22+-label%3A%22feature%3A+missing%22+-label%3A%22feature%3A+labels%22+-label%3A%22project%3A+Native+Land+Attribution%22+-label%3A%22project%3A+LAANE%22+-label%3A%22project%3A+Climate+Collabathon%22+-label%3A%22project%3A+climate+projects%22+-label%3A%22feature%3A+branding%22+-label%3A%22project%3A+EDA%22+-label%3A%22project%3A+Data+Science+CoP+Maintenance%22+-label%3A%22project%3A+MC+Southern+Africa+NGOs%27+OS+usage%22+is%3Aopen) The feature and project missing tags are mutually exclusive. Please use the correct label. - - [role](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+-label%3A%22role%3A+product%22+-label%3A%22role%3A+CoP+lead%22+-label%3A%22role%3A+data+analysis%22+-label%3A%22role%3A+data+science%22+-label%3A%22role%3A+Data+Engineer%22+-label%3A%22role%3A+missing%22+-label%3A%22role%3A+org%22) - - [size](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+-label%3A%22size%3A+epic%22+-label%3A%22size%3A+missing%22+-label%3A%22size%3A+1pt%22+-label%3A%22size%3A+2pt%22+-label%3A%22size%3A+3pt%22+-label%3A%22size%3A+5pt%22+-label%3A%22size%3A+8pt%22+-label%3A%22size%3A+13pt%22+-label%3A%22size%3A+0.5pt%22+-label%3A%22size%3A+0.25pt%22) - - [ ] Add any label missing (e.g., a new feature label go added to the project, but didn't yet get added here) and update link -- [ ] if changes were made, recheck to make sure all links ended up 0/0 -- [ ] Audit for issues with Missing labels (to determine if we are making progress) - - [ ] Copy this audit report format to a comment below -``` -Date: -- [ ] Did you have to update the 0/0 links above? if yes, please say what you did (e.g, Added features to the features check above or added an issue to the project board that has not been added yet). 
-- [ ] Report what the numbers are on issues that are missing labels (e.g., size, ending at 12/5)
-  - [ ] feature, ending at
-  - [ ] project, ending at
-  - [ ] role, ending at
-  - [ ] size, ending at
-  - [x] milestone, ending at (don't have yet, go ahead and skip)
-```
-  - [ ] perform audit, adding your numbers to your audit report comment
-    - [feature](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+label%3A%22feature%3A+missing%22)
-    - [project](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+label%3A%22project%3A+missing%22)
-    - [role](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+label%3A%22role%3A+missing%22)
-    - [size](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+label%3A%22size%3A+missing%22)
-    - [epic](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+label%3A%22epic%3A+missing%22)
-
-### Epic
-  - [epic](https://github.com/hackforla/data-science/issues?q=is%3Aopen+is%3Aissue+-label%3A%22epic%3A+missing%22+-label%3A%22epic%3A+potential+projects%22+-label%3A%22epic%22+-label%3A%22epic%3A+311+public+data+sets%22+-label%3A%22epic%3A+internal+projects%22+-label%3A%22epic%3A+empowerla.org%22+)
-
-
-#### Icebox issue
-- [ ] [Icebox Issues w no dependency](https://github.com/hackforla/data-science/projects/1?card_filter_query=-label%3Adependency#column-139348530)"
-160,"CoP: Data Science: Survey: Repo Labels","project duration: ongoing,size: 2pt,time sensitive,role: Data Engineer,feature: labels,feature: Issues Dashboard","closed","jossus657","time sensitive",31,"2022-02-13T23:40:23Z","2024-06-18T21:59:39Z","2023-03-31T19:20:56Z","MEMBER","### Overview
-We need to survey labels across the organization so that we can rationalize them and support automation and org-wide audits.
-
-#### Additional Details
-We already have an automation running on the github.com/hackforla/website repo that adds labels that start with ""missing:"" and lets the user know what other labels are required. The user can still add optional labels, but they must use the minimum. We want to roll this automation out to all the teams, but in order to do so, they must all be using the minimum labeling in the same way.
-
-We have a [kanban guide](https://docs.google.com/document/d/11Fe7mNdmPBP5bD_yLJ1C0_I1TmoK47AuHHrdhdDyWCs/), but it's confusing to users if all the projects don't use similar labels, so we want to have a base set of labels that will be documented in our instructions.
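-
-One way the label-listing script below might be shaped (a sketch against the REST API covering part of the data schema; assumes a token in the GITHUB_TOKEN env var and skips pagination beyond 100 items per page):
-```python
-import csv
-import json
-import os
-import urllib.request
-
-ORGS = ['hackforla', '100automations', 'civictechindex', 'civictechstructure', 'hackla-engage']
-
-def get(url):
-    req = urllib.request.Request(url, headers={'Authorization': 'token ' + os.environ['GITHUB_TOKEN']})
-    with urllib.request.urlopen(req) as resp:
-        return json.load(resp)
-
-with open('labels.csv', 'w', newline='') as f:
-    out = csv.writer(f)
-    out.writerow(['org', 'repo', 'label', 'description'])
-    for org in ORGS:
-        for repo in get(f'https://api.github.com/orgs/{org}/repos?per_page=100'):
-            name = repo['name']
-            for label in get(f'https://api.github.com/repos/{org}/{name}/labels?per_page=100'):
-                out.writerow([org, name, label['name'], label.get('description') or ''])
-```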
-
-### Action Items
-- [x] Define data schema
-- [x] Create spreadsheet
-- [x] Write a script that gets a list of all labels for each repo across all orgs controlled by Hack for LA
-- [x] Find main label types
-- [ ] Analysis by appearance
-- [ ] Release dependency on hackforla/ops#15
-
-### Resources/Instructions
-#### Orgs to poll
-- Hackforla
-- 100automations
-- civictechindex
-- civictechstructure
-- hackla-engage
-
-#### Data Schema
-- name of org
-- name of repo
-- title of label
-- description of label
-- type of label (role, size, priority, difficulty/complexity, status, feature, p-feature, etc)
-- number of issues per label
-
-### Resources
-- [DS Google Drive, label audit folder](https://drive.google.com/drive/u/0/folders/1l8bMfhmUPG1O3nCLgvIE7jd_HlqAIb1s)
-- [combined_csv - google sheet](https://docs.google.com/spreadsheets/d/1KOCPa4KPE9GPwVdwkUCyvXwLLqUF2TP2rDlOodN62xc/edit#gid=614668317)
-- [Last output from 8/2/22: label.csv](https://github.com/hackforla/data-science/files/9245408/label.csv)"
-148,"Create Geospatial Data Analysis Tutorial","documentation,feature: guide,role: org,size: 1pt","closed","mcmorgan27","None",4,"2022-02-04T04:46:32Z","2022-03-25T16:08:03Z","2022-03-25T16:08:03Z","CONTRIBUTOR","### Overview
-Update the [Geospatial Data Analysis](https://github.com/hackforla/data-science/wiki/Geospatial-Data-Analysis) page with resources and an article header.
-
-### Action Items
-Update the Tutorial with:
-- An introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA
-- A resources section listing vetted tutorials covering important skills within the tutorial area
-
-### Resources/Instructions
-Tools that are core that should be mentioned:
-- Geopandas
-- Shape files
-- Coordinate Systems
-- GIS
-- Data sources
-
-Examples of resources that would be useful to include:
-- Web how-to/tutorial/walk-throughs
-- Youtube playlists or videos demonstrating tools
-- Links to blogs or platforms with subject matter experts
-"
-147,"Create Data Engineering Tutorial","documentation,feature: guide,role: org,size: 1pt","closed","jonmelvin","None",9,"2022-01-21T05:33:37Z","2022-05-12T20:56:34Z","2022-05-12T20:56:34Z","CONTRIBUTOR","### Overview
-We need to add content to the Data Engineering tutorial wiki with resources and an article header, so that new data scientists joining our organization will have them for working with data at Hack for LA.
-
-### Action Items
-- [ ] Create a Google Doc in the folder provided under resources
-  - [ ] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA
-  - [ ] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft
-- [ ] Review the draft with the Data Science CoP
-- [ ] Add to the wiki page
-
-### Resources/Instructions
-#### Wiki page
-[Data Engineering Tutorial](https://github.com/hackforla/data-science/wiki/Data-Engineering)
-
-#### Location for any files you might need to upload (drafts, images, etc.)
-- [Folder for files related to the Data Engineering tutorial](https://drive.google.com/drive/folders/1tgnZPI7XJzu0xTkliBnMN8dswnQ_5Q97) - - [DS: Data Engineering Tutorial Google Doc](https://docs.google.com/document/d/1CqEiUtNMs-l83tiIwvciKx9FCkj3fgBhJ-Hmj7kEGFc/edit) - -#### Tools that are core that should be mentioned: -- SQL -- NoSQL - -#### Examples of resources that would be useful to include: -- Web how-to/tutorial/walk-throughs -- YouTube playlists or videos demonstrating tools -- Links to blogs or platforms with subject-matter experts -" -146,"Create Web Scraping Tutorial","documentation,feature: guide,role: org,size: 1pt","open","parcheesime","None",14,"2022-01-21T05:31:23Z","2022-10-14T03:13:48Z","None","CONTRIBUTOR","### Overview -Update the [Web Scraping](https://github.com/hackforla/data-science/wiki/Webscraping) with resources and an article header. - -### Action Items -- [x] Create a Google Doc in the folder provided under resources - - [x] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA - - [x] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft -- [x] Review the draft with the Data Science CoP -- [x] Add to the wiki page - -### Resources/Instructions -#### Wiki page -[Web Scraping Tutorial](https://github.com/hackforla/data-science/wiki/Webscraping) - -#### Location for any files you might need to upload (drafts, images, etc.) -- [Folder for files related to the Web Scraping tutorial](https://drive.google.com/drive/folders/13GEPhfLPk4MCpEIpzNwtwJBqivmh4etS) - - [DS: Web Scraping Tutorial Google Doc](https://docs.google.com/document/d/15774XE_xw7sDIjUmW95ThttgYFQ-1OVFHWpjOETNH2I/edit) - -#### Tools that are core that should be mentioned: -- Python -- Selenium -- BeautifulSoup -- Requests - -Consider linking to [these videos Sophia made](https://drive.google.com/drive/folders/1fWIRwKI6vUj2RT8YPFHXG9eHMdNSfty_) - -#### Examples of resources that would be useful to include: -- Web how-to/tutorial/walk-throughs -- Youtube playlists or videos demonstrating tools -- Links to blogs or platforms with subject matter experts -" -145,"CoP: Data Science: Create Data Analysis With R Tutorial","documentation,feature: guide,role: data analysis,size: 1pt","open","xuyeluo","None",5,"2022-01-21T05:27:52Z","2024-07-09T00:07:17Z","None","CONTRIBUTOR","### Overview -Update the [Data Analysis With R](https://github.com/hackforla/data-science/wiki/Data-Analysis-With-R) with resources and an article header. - -### Action Items -- [ ] Create a Google Doc in the folder provided under resources - - [ ] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA - - [ ] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft -- [ ] Review the draft with the Data Science CoP -- [ ] Add to the wiki page - -### Resources/Instructions -#### Wiki page -[Data Analysis with R Tutorial](https://github.com/hackforla/data-science/wiki/Data-Analysis-With-R) - -#### Location for any files you might need to upload (drafts, images, etc.) 
-- [Folder for files related to the Data Analysis tutorial](https://drive.google.com/drive/folders/1vwqkqIZsElTmQm36pPmAvsTlRCjHkZWo)
-  - [DS: Data Analysis with R Tutorial Google Doc](https://docs.google.com/document/d/1JfpbJDxPyzFusHrxWh57wV1kqnb5i6KFloXWCSFcG28/edit)
-
-#### Tools that are core that should be mentioned:
-- Tidyverse
-- CRAN
-
-#### Examples of resources that would be useful to include:
-- Web how-to/tutorial/walk-throughs
-- Youtube playlists or videos demonstrating tools
-- Links to blogs or platforms with subject matter experts
-"
-144,"Create Data Visualization Tutorial","documentation,feature: guide,role: org,size: 1pt,feature: needs peer review","closed","None","None",4,"2022-01-21T05:22:40Z","2022-05-05T18:10:49Z","2022-05-05T18:10:34Z","CONTRIBUTOR","### Overview
-Update the [Data Visualization Tutorial](https://github.com/hackforla/data-science/wiki/Data-Visualization) with resources and an explanatory header.
-
-### Action Items
-- [x] Create a Google Doc in the folder provided under resources
-  - [x] Draft an introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA
-  - [x] Identify resources with vetted tutorials covering important skills within the tutorial area, adding to the draft
-- [x] Review the draft with the Data Science CoP
-- [x] Add to the wiki page
-
-### Resources/Instructions
-#### Wiki page
-[Data Visualization Tutorial](https://github.com/hackforla/data-science/wiki/Data-Visualization)
-
-#### Location for any files you might need to upload (drafts, images, etc.)
-- [Folder for files related to the Data Visualization tutorial](https://drive.google.com/drive/folders/1IiTuyJlJ3o1KRVXh8l9cpjrPu7zSVm1s)
-  - [DS: Data Visualization Tutorial Google Doc](https://docs.google.com/document/d/1umiXKgA0qnK3VpArEfIkENq-Nm4YJHTMzHE7laHKCRE/edit)
-
-#### Tools that are core that should be mentioned:
-- Pandas
-- Seaborn
-- Matplotlib
-- Tableau
-
-#### Examples of resources that would be useful to include:
-- Web how-to/tutorial/walk-throughs
-- Youtube playlists or videos demonstrating tools
-- Links to blogs or platforms with subject matter experts
-"
-143,"Create ETL/Data Cleaning Tutorial","documentation,feature: guide,role: org,size: 1pt","closed","ryanmswan","None",2,"2022-01-21T05:19:10Z","2023-03-31T19:14:05Z","2023-03-31T19:14:05Z","CONTRIBUTOR","### Overview
-Update the [ETL/Data Cleaning Tutorial](https://github.com/hackforla/data-science/wiki/ETL-Data-Cleaning-Resources) with resources and an explanatory header.
-
-### Action Items
-Create a [wiki page in the data-science repo](https://github.com/hackforla/data-science/wiki) for the listed subject
-Update the Tutorial with:
-- An introductory paragraph explaining what the tutorial resources cover and why a new data scientist would use them for working with data at Hack For LA
-- A resources section listing vetted tutorials covering important skills within the tutorial area
-
-### Resources/Instructions
-Examples of resources that would be useful to include:
-- Web how-to/tutorial/walk-throughs
-- Youtube playlists or videos demonstrating tools
-- Links to blogs or platforms with subject matter experts
-"
-141,"Obtain Shape Files for Different District Types (2023)","good first issue,role: data analysis,project duration: one time,dependency,size: 1pt,project: district types","closed","mru-hub","None",2,"2021-12-10T19:43:11Z","2023-07-21T03:58:29Z","2023-07-21T03:58:29Z","CONTRIBUTOR","### Overview
-We need to obtain new shape files for the various district types that represent stakeholders we work with once they reflect redistricting. This is part of the epic issue #118 and should be done annually until we can automate the process.
-
-### Action Items
-- [x] Save shape files updated in 2022 to the data-science repo. Folder is [here](https://drive.google.com/drive/folders/13oZXXIfYlEzW0LaPL8OhrmGGigBkd8K6)
-
-- [x] Save shape files updated in 2023 to the Google drive folder [here](https://drive.google.com/drive/folders/1ZQWeoe9O6NdC2KXIZ2WQJbcct7vsvDMS):
-  - [x] [Neighborhood Councils (NCs)](https://empowerla.org/councils/) | [_Shape Files Here_](https://geohub.lacity.org/datasets/neighborhood-council-boundaries-2018/explore)
-  - [x] [Supervisory Districts](https://bos.lacounty.gov/About-Us/Board-of-Supervisors) | [_Shape Files Here_](https://egis-lacounty.hub.arcgis.com/datasets/c7d0324969684262a7f01ce9222c8657/explore)
-  - [x] [Business Improvement Districts (BIDs)](https://clerk.lacity.org/clerk-services/bids) | [_Shape Files Here_](https://data.lacity.org/Housing-and-Real-Estate/Business-Improvement-Districts/733b-ryrk)
-  - [x] [U.S. House of Representatives that serve LA](http://www.laalmanac.com/government/gu02map.php) | [_Shape Files Here_](https://wedrawthelines.ca.gov/maps-final-draft-congressional-districts/)
-  - [x] [California State Senators that serve LA](https://www.senate.ca.gov/) | [_Shape Files Here_](https://wedrawthelines.ca.gov/maps-final-draft-senate-districts/)
-  - [x] [California Assembly members that serve LA](https://www.assembly.ca.gov/assemblymembers) | [_Shape Files Here_](https://wedrawthelines.ca.gov/maps-final-draft-assembly-districts/)
-  - [x] Zip Code | [_Shape Files Here_](https://hub.arcgis.com/datasets/lacounty::la-county-zip-codes/explore)
-  - [x] LA County City Boundaries | [_Shape Files Here_](https://egis-lacounty.hub.arcgis.com/datasets/lacounty::city-boundaries-lines/about)
-  - Note that some of these (i.e. zip codes) may not be updated with redistricting.
-
-### Resources/Instructions
-Google Drive folder for storing files [here](https://drive.google.com/drive/folders/1KsIfAFmp0ArLauvHY1k9wRc9ZXaPDahe)"
-140,"Create a logo for the Data Science CoP","project duration: one time,size: 2pt,feature: branding,role: design","closed","None","None",4,"2021-12-01T07:04:58Z","2024-06-18T02:59:55Z","2022-09-15T22:45:43Z","MEMBER","### Overview
-
-Create a logo for the Data Science CoP, for use in PowerPoint presentations and deliverables. We currently have a draft version in need of refinement. It will likely need to be rebuilt.
-
-### Action Items
-
-- [x] Refine current draft of logo
-  - [x] Simplify visually - fewer elements, no backdrop, better perspective angle
-  - [x] Find clearer placement for Hack for LA logo within or alongside Data Science logo.
-
-### Resources/Instructions
-
-#### Latest Files
-
-The latest files are saved in this folder in the Data Science shared Google Drive:
-https://drive.google.com/drive/u/0/folders/1d0IrLzTGEWP5PkGS0aocLi1tCK2jSt8b
-
-The latest file is an svg made with Google Drawings. The [main Base Logo file is here](https://docs.google.com/drawings/d/1ARUH58iSl4_cMo5RyQ8xhPcAmfjnf3uZF9mjcF7iTBU/edit) and the [Favicon file is here](https://docs.google.com/drawings/d/1hb_1iSWR0XYV2bZMOqsKa_7X3hqW-uMDjhryaJ6R-7U/edit).
-
-##### Previous Draft
-The previous draft was made by combining a matplotlib background (for the beloved(?) data science aesthetic) with other elements photoshopped into place. The files can be found in the [logo branch](https://github.com/hackforla/data-science/tree/logo) of the repo here:
-
-- [Backdrop rendered in jupyter notebook](https://github.com/hackforla/data-science/tree/logo/logo/Logo_Elements/3d_backdrop/logo_backdrop_matplotlib.ipynb)
-- [Photoshop file](https://github.com/hackforla/data-science/tree/logo/logo/Logo_Assembly_cropped.psd)
-- [Draft logo](https://github.com/hackforla/data-science/tree/logo/logo/Drafts/Logo_outlined.png)
-- [Alternate draft 1](https://github.com/hackforla/data-science/tree/logo/logo/Drafts/Logo_Assembly_option_1.png)
-- [Alternate draft 2](https://github.com/hackforla/data-science/tree/logo/logo/Drafts/Logo_Assembly_option_2.png)"
-138,"Starter Project for New Data Analysts and Data Scientists","role: product,role: CoP lead,feature: onboarding,size: epic,dependency","closed","None","None",1,"2021-11-19T20:36:07Z","2021-11-30T19:00:45Z","2021-11-30T19:00:45Z","CONTRIBUTOR","### Dependency
-Discuss how this integrates with Hack for LA plans to engage new data scientists and get them working on productive projects.
-
-### Overview
-CoP lead proposed developing a starter project using a toy data set to help new recruits understand various tasks and allow mentors to provide useful feedback based on deeper understanding of the project and data set.
-
-### Action Items
-- [ ] Determine what skills should be used in the starter project. Possibilities include:
-  - Data cleaning
-  - Documentation (i.e. adding structured context)
-  - Technologies (e.g., Jupyter, Python, NumPy, SQL, etc.)
-  - Summary of findings
-- [ ] Collect useful resources and documentation for recruits to refer to
-- [ ] Select data set(s) to be used for project
-- [ ] Create notebook with step-by-step instructions
-- [ ] Get feedback from CoP
-
-### Resources/Instructions
-"
-137,"Data Science CoP Meeting Agendas (Monday 7PM PST)","feature: guide,role: product,size: 1pt,feature: agenda","open","akhaleghi","None",40,"2021-11-19T04:37:43Z","2024-03-19T02:10:52Z","None","CONTRIBUTOR","### Overview
-This issue tracks the agenda for the Data Science CoP meetings.
-
-### Weekly Agenda, Recordings, and Transcripts
-- November 2021: [2021-11-18](https://github.com/hackforla/data-science/issues/137#issuecomment-973737827), [2021-11-19](https://github.com/hackforla/data-science/issues/137#issuecomment-974421073)
-- December 2021: [2021-12-02](https://github.com/hackforla/data-science/issues/137#issuecomment-985213751), [2021-12-16](https://github.com/hackforla/data-science/issues/137#issuecomment-996413245)
-- January 2022: [2022-01-06](https://github.com/hackforla/data-science/issues/137#issuecomment-1007118306), [2022-01-13](https://github.com/hackforla/data-science/issues/137#issuecomment-1012720668), [2022-01-20](https://github.com/hackforla/data-science/issues/137#issuecomment-1017927346), [2022-01-27](https://github.com/hackforla/data-science/issues/137#issuecomment-1023838528)
-- February 2022: [2022-02-03](https://github.com/hackforla/data-science/issues/137#issuecomment-1029601494), [2022-02-10](https://github.com/hackforla/data-science/issues/137#issuecomment-1035832929), [2022-02-17](https://github.com/hackforla/data-science/issues/137#issuecomment-1043847040), [2022-02-24](https://github.com/hackforla/data-science/issues/137#issuecomment-1050495019)
-
-### Resources/Instructions
-[Zoom link for Thursday meetings](https://us02web.zoom.us/j/81067015817?pwd=M3l6a0tQTWhLbnlTbEZNOWJ5UXN3QT09)
-"
-135,"CoP: Data Science: Find and document all the 311 public data sets","role: data analysis,project: 311 Data,size: 2pt,epic: 311 public data sets","open","venkata-sai-swathi","None",5,"2021-11-12T21:47:39Z","2024-07-10T16:39:05Z","None","MEMBER","### Overview
-We need to create a definitive list of all the city government data portals for the Greater LA area.
-
-### Action Items
-- [x] Add the datasets from the Harker list into the Spreadsheet of cities and counties in Greater LA
-- [x] Google to see if there are any others
-- [x] Find each city's website and record it in the spreadsheet
-- [x] Visit each city's website to confirm whether or not it has a data portal.
-- [x] If there are notes about why they don't have one (article, etc.), provide a link to that info or paste it if short.
-
-### Resources/Instructions
-- [Deliverable: Spreadsheet of cities/counties in Greater LA with URLs](https://docs.google.com/spreadsheets/d/1Vjmof0CfFPGyWDlDbChc_hk1MF6pSdGEwcFcAMeQCIQ/edit#gid=0)
-- [Wikipedia list of Cities in LA with links to their pages, and each one probably has a link to the official website](https://en.wikipedia.org/wiki/List_of_cities_in_Los_Angeles_County,_California)
-- [Harker List - completed](http://www.harker.com/OpenData/socrata-data-portals.html)"
-134,"Overview Dashboard - add titles to graphs","feature: guide,role: data science,project: 311-data-dashboards,size: 1pt,epic: 311 public data sets","closed","chelseybeck","None",1,"2021-11-05T02:01:19Z","2022-03-18T20:07:21Z","2021-11-19T04:01:41Z","MEMBER","### Overview
-None of the figures on the dashboards currently have titles. This issue is to add titles to the figures on the Overview page on the dev site.
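-
-Mechanically this is one update_layout call per figure; e.g., with Plotly (the data and title below are placeholders, not the dashboard's real figures):
-```python
-import plotly.express as px
-
-# Placeholder data; the real figures live in the 311-data dashboards directory
-fig1 = px.bar(x=['Graffiti', 'Bulky Items'], y=[120, 340])
-fig1.update_layout(title='311 Requests by Type (placeholder title)')
-fig1.show()
-```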
- -### Action Items - -- [x] add title to fig1 -- [x] add title to fig2 -- [x] add title to fig3 -- [x] add title to fig4 -- [x] add title to fig5 -- [x] add title to fig6 - - -### Resources/Instructions - -[311-data project onboarding](https://docs.google.com/document/d/1fNIxKJl91YZ_b6PRvKMdyd7tXJwWbfGIAJ5VTfe8Jow/edit?pli=1#heading=h.c8tc614ce3iu) - -Refer to the documentation in the [311-data dashboards directory](https://github.com/hackforla/311-data/tree/dev/server/dash/dashboards) for creating or modifying dashboards - -[Dash Python User Guide](https://dash.plotly.com/) - -Optional: prototype reports in Jupyter Notebooks -examples found [here](https://drive.google.com/drive/u/3/folders/1P-uID8FpnqwvYlUmzWsmgU4pxRSeb72z). -" -133,"Create labels, features, milestones","role: product,dependency,size: 3pt,feature: project management","open","None","None",2,"2021-11-01T23:40:13Z","2024-02-13T03:28:58Z","None","MEMBER","### Dependency -Figure out which (if any) milestones and milestone labels are needed for DS CoP - -### Overview -We need the board to be easy all team members to navigate and use to be effective, so that we can build capacity and velocity - -### Action Items -#### Make labels -- [x] Labels - - [x] features - - [x] roles - - [x] size - - [x] epics -- [x] label check issue for the following #149 - - [x] features - - [x] roles - - [x] size - - [x] epics - -#### Milestones -- [ ] Start spreadsheet -- [ ] meet with Sophia and Ryan -- [ ] create milestones -- [ ] add milestones to all issues -- [ ] re-prioritize all issues in backlog - -### Resources/Instructions -- https://github.com/hackforla/data-science/issues/149" -131,"Story Size for Data Science Issues","role: product,size: 1pt,feature: project management","closed","akhaleghi","None",0,"2021-10-24T18:07:51Z","2021-11-05T20:47:03Z","2021-11-01T23:36:54Z","CONTRIBUTOR","### Overview -This issue explains the meaning of story points that we use for estimating the relative effort required for issues. - -### Resources/Instructions -When creating new issues, add a ""size"" label based on the estimated time effort required to complete the tasks. _Note that each point roughly equates to 6 hours or less of work._ -- size: 1pt - can be done in 6 hours or less -- size: 2pt - can be done in 7-12 hours -- size: 3pt - can be done in 13-18 hours -- size: 5pt - can be done in 19-30 hours -- size: 8pt - can be done in 31-48 hours -- size: 13+pt - must be broken down into smaller issues." -130,"Create a Guide: Web Scraping","Guide: Research,feature: guide,role: org,size: 5pt,CoP: Data Science","open","parcheesime","None",0,"2021-10-22T16:49:52Z","2024-06-05T12:54:03Z","None","CONTRIBUTOR","### Overview -This issue contains resources to help community members learn more about web scraping in Python, including the use of APIs. - -### Action Items -- [ ] Gather resources, including relevant Hack for LA content (i.e. our tutorials and projects that have used web scraping), online courses, and tutorial/how-to web content. 
- - [ ] Once done, remove the ""Guide: Research"" label and add the ""Guide: Draft Guide"" label -- [ ] Create a draft template, either in markdown format in this issue or a google doc in the [Data Science google drive](https://drive.google.com/drive/folders/1ZBzxr7Wv3IlDsvIM5wAeHflI4op_X01M) - - [ ] Once done, remove the ""Guide: Draft Guide"" label and add the ""Guide: Create Guide"" label -- [ ] Create a guide on how to use the resources contained, including steps on how to get started for volunteers new to web scraping - - [ ] Once done, remove the ""Guide: Create Guide"" label and add the ""Guide: Review Guide"" label -- [ ] Review the guide with Data Science Communities of Practice - - [ ] Once done, remove the ""Guide: Review Guide"" label and add the ""Guide: Leadership Review"" label -- [ ] Present to Hack for LA leadership team for sign off - - [ ] Once approved, remove the ""Guide: Leadership Review"" label and add the ""Guide: Place Guide"" label -- [ ] Include link to guide under Resources if you add it as a template in .github - -### Resources/Instructions -[Hack for LA Web Scraping Tutorial with Selenium/Docker/Python](https://drive.google.com/drive/folders/1t3AO2A8uOcKYrTGKuEzMLnsZPdPUo9Si) -" -124,"Obtain Shape Files for Different District Types as of Nov/Dec 2021","feature: guide,role: data analysis,project duration: one time,size: 1pt,project: district types","closed","None","None",6,"2021-10-14T21:42:29Z","2023-03-31T01:10:05Z","2023-03-31T01:10:04Z","CONTRIBUTOR","### Overview -We need to obtain shape files for the various district types that represent stakeholders we work with. This is part of the epic issue #118 - -### Action Items -- [x] Identify shape file sources for the following district types: - - [x] [Neighborhood Councils (NCs)](https://empowerla.org/councils/) | [_Shape Files Here_](https://geohub.lacity.org/datasets/neighborhood-council-boundaries-2018/explore) - - [x] [Supervisory Districts](https://bos.lacounty.gov/About-Us/Board-of-Supervisors) | [_Shape Files Here_](https://data.lacounty.gov/GIS-Data/Supervisorial-District-2011-/krcx-r26t) - - [x] [Business Improvement Districts (BIDs)](https://clerk.lacity.org/clerk-services/bids) | [_Shape Files Here_](https://data.lacity.org/Housing-and-Real-Estate/Business-Improvement-Districts/733b-ryrk) - - [x] [U.S. House of Representatives that serve LA](http://www.laalmanac.com/government/gu02map.php) | [_Shape Files Here_](https://wedrawthelines.ca.gov/maps-final-draft-congressional-districts/) - - [x] [California State Senators that serve LA](https://www.senate.ca.gov/) | [_Shape Files Here_](https://wedrawthelines.ca.gov/maps-final-draft-senate-districts/) - - [x] [California Assembly members that serve LA](https://www.assembly.ca.gov/assemblymembers) | [_Shape Files Here_](https://wedrawthelines.ca.gov/maps-final-draft-assembly-districts/) - - [x] Zip Code | [_Shape Files Here_](https://data.lacounty.gov/Geospatial/ZIP-Codes/65v5-jw9f) -- [x] Save shape files to the data-science repo - - [x] Neighborhood Councils (NCs) - - [x] Supervisory Districts - - [x] Business Improvement Districts (BIDs) - - [x] U.S. House of Representatives that serve LA - - [x] California State Senators that serve LA - - [x] California Assembly members that serve LA - - [x] Zip Code -- [ ] Develop policy for updating files - - [ ] How often do the boundaries change for the different district types? - - [ ] Neighborhood Councils (NCs) - - [x] Supervisory Districts - - [ ] Business Improvement Districts (BIDs) - - [x] U.S. 
House of Representatives that serve LA - - [x] California State Senators that serve LA - - [x] California Assembly members that serve LA - - [x] Zip Code - - [ ] Can these be self-updating via APIs? - -### Resources/Instructions -Google Drive [files here](https://drive.google.com/drive/folders/1KsIfAFmp0ArLauvHY1k9wRc9ZXaPDahe) " -121,"Template For Creating Epics For Data Science Projects","role: product,size: 1pt,feature: project management","closed","akhaleghi","None",2,"2021-10-01T17:43:55Z","2024-04-15T21:15:51Z","2021-11-02T19:57:39Z","CONTRIBUTOR","### Overview -We need to develop a uniform way to approach large data science projects and make it easier to identify smaller subtasks to facilitate completion. - -### Action Items -- [x] Write draft template -- [x] Consult CoP and org leads -- [x] Add template to repository - -### Resources/Instructions -Example Epics: -Issues #106 , #107 -[DS repo add template section](https://github.com/hackforla/data-science/issues/templates/edit) -" -120,"Structured Context for HfLA created data sets","feature: guide,role: data science,size: epic,project: structured context,TG: Draft Template,CoP: Data Science","open","None","None",4,"2021-09-30T16:31:22Z","2024-06-05T12:54:03Z","None","MEMBER","### Overview -We need to have minimum standards for structured context in data sets we create and or deploy in our projects so that we can help consumers understand the context from which they come. - -### Action Items -- [ ] Identify existing resources for structured context, adding each example as a link in the resources section - - [ ] Once done, remove the ""TG: Gather Examples"" label and add the ""TG: Draft Template"" label -- [ ] Create a draft template, in all the formats the data will come in (json, csv, etc.) and upload to repo as a pr in the folder structured-context - - [ ] Once done, remove the ""TG: Draft Template"" label and add the ""TG: Create Guide"" label -- [ ] Discuss plan with data science team -- [ ] Create a guide on how to use the template - - [ ] Once done, remove the ""TG: Create Guide"" label and add the ""TG: Review Guide"" label -- [ ] Review the guide with product management communities of practice - - [ ] Once done, remove the ""TG: Review Guide"" label and add the ""TG: Leadership Review"" label -- [ ] Present to Hack for LA leadership team for sign off - - [ ] Once approved, remove the ""TG: Leadership Review"" label and add the ""TG: Place Guide"" label -- [ ] Possibly create an issue template on .github - - [ ] Include link to template under resources if you add it as a template in .github - -### Resources -- Refer to https://github.com/hackforla/product-management/issues/123 for how to write a guide and the guide template - - -### Resources/Instructions -[Datasheets for Data Sets](https://arxiv.org/pdf/1803.09010.pdf) -Definition of [data context](https://simplicable.com/new/data-context) -Article on [data in context](https://www.idashboards.com/blog/2018/05/02/data-in-context/) -Adding context to [data visualizations](https://www.yellowfinbi.com/blog/2017/03/spoonful-of-context-helps-data-visualization-go-down) -[Civic Data Library](https://www.civicdatalibrary.org) -[Civic Software Foundation](https://www.civicdatalibrary.org)" -118,"CoP: Data Science: Create district types reusable tool (API, single dataset, etc.)","feature: guide,role: data analysis,epic,size: epic,size: 1pt","open","parcheesime","None",13,"2021-09-28T18:48:35Z","2024-06-27T16:20:53Z","None","MEMBER","### Overview -We need to create a tool so that each 
project at H4LA that renders points on a map can use District Files to help people analyze or view the data. - -## Action Items -- [x] Identify large groups/districts -- [x] Identify links for groups/districts -- [x] Locate and obtain shape files for these districts #124 -- [x] Determine what files types we will make these available (shp, npm, and/or GeoJSON) -- [ ] Put files in GitHub repository so they are available to use in the organization. -- [x] research how we will create a data set out of this info that will be self updating (meaning are there apis for these groups) -- [ ] ... - -## Resources -[Example Neighborhood Council Shape File](https://geohub.lacity.org/datasets/neighborhood-council-boundaries-2018/explore) - -#### Initial Identification of Large Groups/Districts - - City [Neighborhood Councils (NCs)](https://empowerla.org/councils/) - - County [Supervisory Districts](https://bos.lacounty.gov/About-Us/Board-of-Supervisors) - - City [Business Improvement Districts (BIDs)](https://clerk.lacity.org/clerk-services/bids) - - National [U.S. House of Representatives that serve LA](http://www.laalmanac.com/government/gu02map.php) - - State [California State Senators that serve LA](https://www.senate.ca.gov/) - - State [California Assembly members that serve LA](https://www.assembly.ca.gov/assemblymembers)" -94,"Data Science Competitive/Comparative Analysis","feature: guide,role: product,size: 2pt","closed","None","None",0,"2021-09-10T19:27:09Z","2023-03-31T01:16:37Z","2023-03-31T01:16:37Z","CONTRIBUTOR","### Overview -We need to know what competitors have on their sites so that we can use it as a factor when we decide what to put on our site. - -### Action Items - -- [x] Examine competitor websites to determine list of features that should be used for DS site. 
-- [x] Create list of site features, rank by MoSCoW (move to new issue) - -### Resources/Instructions -[Comparative Analysis Document](https://docs.google.com/spreadsheets/d/138Oul1HQLNuuGvIf0P894cmyo4A2Nf2ZSJtZoZTd26U/edit#gid=1259306930) -[Example from Access the Data Team](https://github.com/hackforla/access-the-data/issues/8)" +Issue Number,Title,Labels,State,User Login,User ID,Assignee Login,Assignee ID +204,"Recruit volunteers for team open roles","role: missing,epic,ready for product,size: 0.25pt,feature: recruiting","open","akhaleghi",7635911,"None","None" +203,"Prep project boards for Migration","role: product","open","akhaleghi",7635911,"akhaleghi",7635911 +202,"Information for New and Existing Members","CoP: Data Science","open","akhaleghi",7635911,"None","None" +200,"EPA Data Set","","open","akhaleghi",7635911,"salice",4333657 +198,"Update the About us page on wiki","role: product,feature: onboarding,project duration: one time,ready for product,project: Data Science CoP Maintenance,size: 0.25pt","open","ExperimentsInHonesty",37763229,"max1million101",122141183 +194,"Create data dictionary (EDA task)","feature: missing,role: missing,size: missing,project: missing","open","Lalla22",47159210,"None","None" +193,"DRAFT: Access to ""Third Spaces""","feature: missing,role: missing,size: missing,project: missing","open","akhaleghi",7635911,"None","None" +191,"DRAFT: MediaWiki NLP Project","feature: missing,role: missing,size: missing,project: missing","open","akhaleghi",7635911,"salice",4333657 +190,"Create Base64 and Env File Tutorial","feature: missing,role: missing,size: missing,project: missing","open","kimberlytanyh",76601090,"None","None" +189,"Refining the Requirements for Skills Match Dashboard","role: missing,size: missing,project: missing,feature: skills / productivity dashboard","open","n2020h",72112832,"n2020h",72112832 +183,"MERL Center Data Cleaning","role: missing,project duration: one time,size: 1pt,project: MC Southern Africa NGOs' OS usage","closed","Rabia2219",75643389,"None","None" +182,"CoP: Data Science: Active and Inactive Businesses of LA County","role: data science,epic,size: 3pt,project: EDA","open","akhaleghi",7635911,"rahul897",5197842 +181,"CoP: Data Science: Affordable Housing Access to Food Pantries","role: data science,epic,size: 3pt,project: EDA","closed","akhaleghi",7635911,"MDivyaPrakash",22434989 +180,"City of Los Angeles Arrests","role: data science,epic,size: 3pt,project: EDA","open","akhaleghi",7635911,"dolla24",13824693 +179,"CoP: Data Science: City of Los Angeles Evictions","role: data science,epic,size: 3pt,project: EDA","closed","akhaleghi",7635911,"rahul897",5197842 +178,"CoP: Data Science: Find data sources through the LA Controller's office","documentation,good first issue,role: data analysis,size: 1pt,project: missing","open","akhaleghi",7635911,"FragariaChemist",103977933 +177,"Create 311 data CSV files that can be accessed through a Jupyter notebook","role: data science,project: 311 Data,size: 3pt,feature: project management","closed","akhaleghi",7635911,"mru-hub",105573589 +170,"California Grants Portal Data Set EDA","role: data analysis,project duration: ongoing,size: 1pt,project: EDA","closed","akhaleghi",7635911,"jossus657",97187879 +169,"Potential Projects: California Grants Portal","role: data science,epic,size: epic,dependency,project: EDA","closed","akhaleghi",7635911,"mihikasahani",102847564 +163,"CoP: Data Science: Complete Intro to Command Line and Git Tutorial","feature: guide,role: data science,size: 1pt,role: Data 
Engineer","open","akhaleghi",7635911,"None","None" +162,"CoP: Data Science: Complete Intro to Python Tutorial","feature: guide,role: data science,size: 1pt,draft,ready for product","open","akhaleghi",7635911,"None","None" +161,"Project Name: Lucky Parking","feature: missing,role: data analysis,size: 1pt,project: missing","closed","gregpawin",36276149,"None","None" +157,"CoP: Data Science: Create Deep Learning Tutorial","documentation,feature: guide,role: data science,size: 1pt","open","akhaleghi",7635911,"elliealbertson",120353163 +156,"CoP: Data Science: Create Machine Learning Tutorial","documentation,feature: guide,role: data science,size: 1pt","open","akhaleghi",7635911,"BhavanaSai12",173202606 +155,"CoP: Data Science: Create Stats Tutorial","documentation,feature: guide,role: data science,role: data analysis,size: 1pt","open","akhaleghi",7635911,"None","None" +154,"CoP: Data Science: Create Data Ops Tutorial","documentation,feature: guide,size: 1pt,role: Data Engineer","open","akhaleghi",7635911,"None","None" +153,"CoP: Data Science: Create Text Analysis Tutorial","documentation,feature: guide,role: data analysis,size: 1pt","open","akhaleghi",7635911,"bfang22",111911687 +152,"CoP: Data Science: Open Source Projects Data Set for Climate projects","role: data analysis,size: 1pt,epic: GreenEarthOS,project: climate projects,draft","open","akhaleghi",7635911,"noneill256",100643509 +149,"Weekly Label Check","role: product,size: 1pt,feature: project management","open","ExperimentsInHonesty",37763229,"None","None" +160,"CoP: Data Science: Survey: Repo Labels","project duration: ongoing,size: 2pt,time sensitive,role: Data Engineer,feature: labels,feature: Issues Dashboard","closed","Neecolaa",7437035,"jossus657",97187879 +148,"Create Geospatial Data Analysis Tutorial","documentation,feature: guide,role: org,size: 1pt","closed","ryanmswan",2508797,"mcmorgan27",3988983 +147,"Create Data Engineering Tutorial","documentation,feature: guide,role: org,size: 1pt","closed","ryanmswan",2508797,"jonmelvin",25702027 +146,"Create Web Scraping Tutorial","documentation,feature: guide,role: org,size: 1pt","open","ryanmswan",2508797,"parcheesime",38143160 +145,"CoP: Data Science: Create Data Analysis With R Tutorial","documentation,feature: guide,role: data analysis,size: 1pt","open","ryanmswan",2508797,"xuyeluo",169718490 +144,"Create Data Visualization Tutorial","documentation,feature: guide,role: org,size: 1pt,feature: needs peer review","closed","ryanmswan",2508797,"None","None" +143,"Create ETL/Data Cleaning Tutorial","documentation,feature: guide,role: org,size: 1pt","closed","ryanmswan",2508797,"ryanmswan",2508797 +141,"Obtain Shape Files for Different District Types (2023)","good first issue,role: data analysis,project duration: one time,dependency,size: 1pt,project: district types","closed","akhaleghi",7635911,"mru-hub",105573589 +140,"Create a logo for the Data Science CoP","project duration: one time,size: 2pt,feature: branding,role: design","closed","henrykaplan",50933869,"None","None" +138,"Starter Project for New Data Analysts and Data Scientists","role: product,role: CoP lead,feature: onboarding,size: epic,dependency","closed","akhaleghi",7635911,"None","None" +137,"Data Science CoP Meeting Agendas (Monday 7PM PST)","feature: guide,role: product,size: 1pt,feature: agenda","open","akhaleghi",7635911,"akhaleghi",7635911 +135,"CoP: Data Science: Find and document all the 311 public data sets","role: data analysis,project: 311 Data,size: 2pt,epic: 311 public data 
sets","open","ExperimentsInHonesty",37763229,"venkata-sai-swathi",61697502 +134,"Overview Dashboard - add titles to graphs","feature: guide,role: data science,project: 311-data-dashboards,size: 1pt,epic: 311 public data sets","closed","chelseybeck",64881557,"chelseybeck",64881557 +133,"Create labels, features, milestones","role: product,dependency,size: 3pt,feature: project management","open","ExperimentsInHonesty",37763229,"None","None" +131,"Story Size for Data Science Issues","role: product,size: 1pt,feature: project management","closed","akhaleghi",7635911,"akhaleghi",7635911 +130,"Create a Guide: Web Scraping","Guide: Research,feature: guide,role: org,size: 5pt,CoP: Data Science","open","akhaleghi",7635911,"parcheesime",38143160 +124,"Obtain Shape Files for Different District Types as of Nov/Dec 2021","feature: guide,role: data analysis,project duration: one time,size: 1pt,project: district types","closed","akhaleghi",7635911,"None","None" +121,"Template For Creating Epics For Data Science Projects","role: product,size: 1pt,feature: project management","closed","akhaleghi",7635911,"akhaleghi",7635911 +120,"Structured Context for HfLA created data sets","feature: guide,role: data science,size: epic,project: structured context,TG: Draft Template,CoP: Data Science","open","ExperimentsInHonesty",37763229,"None","None" +118,"CoP: Data Science: Create district types reusable tool (API, single dataset, etc.)","feature: guide,role: data analysis,epic,size: epic,size: 1pt","open","ExperimentsInHonesty",37763229,"parcheesime",38143160 +94,"Data Science Competitive/Comparative Analysis","feature: guide,role: product,size: 2pt","closed","akhaleghi",7635911,"None","None" From e3a668a3dd201ac7ec46f9739e50ff53ec2a3267 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 22 Jul 2024 19:38:03 -0400 Subject: [PATCH 18/49] Create pull_requests_to_csv.yml --- .github/workflows/pull_requests_to_csv.yml | 66 ++++++++++++++++++++++ 1 file changed, 66 insertions(+) create mode 100644 .github/workflows/pull_requests_to_csv.yml diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml new file mode 100644 index 0000000..088561e --- /dev/null +++ b/.github/workflows/pull_requests_to_csv.yml @@ -0,0 +1,66 @@ +name: List Pull Requests and Output as CSV + +on: + push: + branches: + - n2020h-issues-to-csv + workflow_dispatch: + schedule: + - cron: '0 0 * * *' # Runs daily at midnight + pull_request: + types: [opened, closed, reopened] + branches: + - n2020h-issues-to-csv + +jobs: + list-pull-requests: + runs-on: ubuntu-latest + + steps: + # Checkout the repository to access any scripts or tools you might need + - name: Checkout repository + uses: actions/checkout@v3 + + # Set up Node.js to use jq command + - name: Set up Node.js + uses: actions/setup-node@v3 + with: + node-version: '16' + + # Fetch pull requests from GitHub API and generate CSV + - name: Fetch pull requests and generate CSV + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + # Fetch pull requests data and save it to pulls.json + curl -H "Authorization: token $GITHUB_TOKEN" \ + -H "Accept: application/vnd.github.v3+json" \ + https://api.github.com/repos/${{ github.repository }}/pulls?state=all \ + -o pulls.json + + # Use jq to parse pulls.json and generate my_org_PRs.csv + jq -r '[.[] | { + "PR Number": .number, + "Title": .title, + "Description": .body, + "Author": .user.login, + "State": .state, + "Number of Commits": .commits, + "Number of Files Changed": 
.changed_files,
+            "Labels": (.labels | map(.name) | join(",")),
+            "Assignees": (.assignees | map(.login) | join(",")),
+            "Linked Issues": ([capture("#(?<number>\\d+)") | .number] | join(",")),
+            "Reviewers": (.requested_reviewers | map(.login) | join(","))
+          }] | (first | keys_unsorted) as $keys | $keys, map([.[ $keys[] ]])[] | @csv' \
+          pulls.json > hackforla_pull_requests.csv
+
+      # Commit and push the generated CSV to the repository
+      - name: Commit and push CSV
+        run: |
+          git config --local user.email "action@github.com"
+          git config --local user.name "GitHub Action"
+          git add my_org_PRs.csv
+          git commit -m "Update pull requests CSV"
+          git push
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

From ea2b851cdf4df24b6af49fcb2e37444c75239c81 Mon Sep 17 00:00:00 2001
From: n2020h <72112832+n2020h@users.noreply.github.com>
Date: Mon, 22 Jul 2024 21:41:35 -0400
Subject: [PATCH 19/49] Debug pull_requests_to_csv.yml

Debug error in "Generate pull requests"
---
 .github/workflows/pull_requests_to_csv.yml | 47 ++++++++++------------
 1 file changed, 21 insertions(+), 26 deletions(-)

diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml
index 088561e..109a4b5 100644
--- a/.github/workflows/pull_requests_to_csv.yml
+++ b/.github/workflows/pull_requests_to_csv.yml
@@ -21,24 +21,18 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@v3
 
-      # Set up Node.js to use jq command
-      - name: Set up Node.js
-        uses: actions/setup-node@v3
-        with:
-          node-version: '16'
+      # # Set up Node.js to use jq command
+      # - name: Set up Node.js
+      #   uses: actions/setup-node@v3
+      #   with:
+      #     node-version: '16'
 
-      # Fetch pull requests from GitHub API and generate CSV
-      - name: Fetch pull requests and generate CSV
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      # Generate pull requests CSV
+      - name: Generate pull requests CSV
         run: |
-          # Fetch pull requests data and save it to pulls.json
-          curl -H "Authorization: token $GITHUB_TOKEN" \
-            -H "Accept: application/vnd.github.v3+json" \
-            https://api.github.com/repos/${{ github.repository }}/pulls?state=all \
-            -o pulls.json
-
-          # Use jq to parse pulls.json and generate my_org_PRs.csv
+          echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Linked Issues,Reviewers" > my_org_PRs.csv
+          curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
+          "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&per_page=100" | \
           jq -r '[.[] | {
             "PR Number": .number,
             "Title": .title,
             "Description": .body,
             "Author": .user.login,
             "State": .state,
             "Number of Commits": .commits,
             "Number of Files Changed": .changed_files,
             "Labels": (.labels | map(.name) | join(",")),
             "Assignees": (.assignees | map(.login) | join(",")),
-            "Linked Issues": ([capture("#(?<number>\\d+)") | .number] | join(",")),
+            "Linked Issues": (.body | capture_all("#(?<number>\\d+)"; "g") | join(",")),
             "Reviewers": (.requested_reviewers | map(.login) | join(","))
-          }] | (first | keys_unsorted) as $keys | $keys, map([.[ $keys[] ]])[] | @csv' \
-          pulls.json > hackforla_pull_requests.csv
+          }] | (first | keys_unsorted) as $keys | $keys, map([.[ $keys[] ]])[] | @csv' >> hackforla_pull_requests.csv
+
 
       # Commit and push the generated CSV to the repository
       - name: Commit and push CSV
         run: |
-          git config --local user.email "action@github.com"
-          git config --local user.name "GitHub Action"
-          git add my_org_PRs.csv
-          git commit -m "Update pull requests CSV"
-          git push
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          git config user.name "Automated"
+          git config user.email "actions@users.noreply.github.com"
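+          # Reviewer note (an assumption, not stated in the commit): `git add -f`
+          # just below forces the generated CSV past any .gitignore rule, and the
+          # later `git push --force` rewrites the target branch, so this pattern
+          # presumes that branch exists only to hold generated data.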
git add -f hackforla_pull_requests.csv + timestamp=$(date -u) + git commit -m "Latest data: ${timestamp}" || exit 0 + git push --force origin HEAD:refs/heads/n2020h-issues-to-csv + # env: + # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From f7098af2a61419c2c4d7231ccb7769253b0e32a1 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 22 Jul 2024 21:46:15 -0400 Subject: [PATCH 20/49] Update node.js --- .github/workflows/pull_requests_to_csv.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index 109a4b5..a839415 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -21,11 +21,11 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 - # # Set up Node.js to use jq command - # - name: Set up Node.js - # uses: actions/setup-node@v3 - # with: - # node-version: '16' + # Set up Node.js to use jq command + - name: Set up Node.js + uses: actions/setup-node@v3 + with: + node-version: '20' # Generate pull requests CSV - name: Generate pull requests CSV From 8a3fd1e931ee5fddf4cff5f22f72af0918d6a8df Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 22 Jul 2024 21:47:34 -0400 Subject: [PATCH 21/49] Update GH token --- .github/workflows/pull_requests_to_csv.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index a839415..c481dc1 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -57,5 +57,5 @@ jobs: timestamp=$(date -u) git commit -m "Latest data: ${timestamp}" || exit 0 git push --force origin HEAD:refs/heads/n2020h-issues-to-csv - # env: - # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From a0d7f3d6f445505624d064142b15e00ec57ffd26 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 22 Jul 2024 21:52:41 -0400 Subject: [PATCH 22/49] Debug pull_requests_to_csv.yml changed output file name --- .github/workflows/pull_requests_to_csv.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index c481dc1..0ea9261 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -30,7 +30,7 @@ jobs: # Generate pull requests CSV - name: Generate pull requests CSV run: | - echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Linked Issues,Reviewers" > my_org_PRs.csv + echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Linked Issues,Reviewers" > hackforla_pull_requests.csv curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&per_page=100" | \ jq -r '[.[] | { From 2b17fbbe98bc153de8c6fe6d4d6deb47bcfbbe12 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 22 Jul 2024 22:08:42 -0400 Subject: [PATCH 23/49] Update pull_requests_to_csv.yml --- .github/workflows/pull_requests_to_csv.yml | 49 ++++++++++------------ 1 file changed, 23 insertions(+), 26 deletions(-) diff --git a/.github/workflows/pull_requests_to_csv.yml 
b/.github/workflows/pull_requests_to_csv.yml
index 0ea9261..e25b880 100644
--- a/.github/workflows/pull_requests_to_csv.yml
+++ b/.github/workflows/pull_requests_to_csv.yml
@@ -7,10 +7,10 @@ on:
   workflow_dispatch:
   schedule:
     - cron: '0 0 * * *' # Runs daily at midnight
-  pull_request:
-    types: [opened, closed, reopened]
-    branches:
-      - n2020h-issues-to-csv
+  # pull_request:
+  #   types: [opened, closed, reopened]
+  #   branches:
+  #     - n2020h-issues-to-csv
 
 jobs:
   list-pull-requests:
@@ -22,38 +22,35 @@ jobs:
       uses: actions/checkout@v3
 
       # Set up Node.js to use jq command
-      - name: Set up Node.js
-        uses: actions/setup-node@v3
-        with:
-          node-version: '20'
+      # - name: Set up Node.js
+      #   uses: actions/setup-node@v3
+      #   with:
+      #     node-version: '20'
 
       # Generate pull requests CSV
       - name: Generate pull requests CSV
         run: |
-          echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Linked Issues,Reviewers" > hackforla_pull_requests.csv
-          curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
-          "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&per_page=100" | \
-          jq -r '[.[] | {
-            "PR Number": .number,
-            "Title": .title,
-            "Description": .body,
-            "Author": .user.login,
-            "State": .state,
-            "Number of Commits": .commits,
-            "Number of Files Changed": .changed_files,
-            "Labels": (.labels | map(.name) | join(",")),
-            "Assignees": (.assignees | map(.login) | join(",")),
-            "Linked Issues": (.body | capture_all("#(?<number>\\d+)"; "g") | join(",")),
-            "Reviewers": (.requested_reviewers | map(.login) | join(","))
-          }] | (first | keys_unsorted) as $keys | $keys, map([.[ $keys[] ]])[] | @csv' >> hackforla_pull_requests.csv
-
+      run: |
+      echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers" > hackforla_PRs.csv
+      jq -r '.[] | [
+        .number,
+        .title,
+        .body,
+        .user.login,
+        .state,
+        .commits,
+        .changed_files,
+        (.labels | map(.name) | join(",")),
+        (.assignees | map(.login) | join(",")),
+        (.requested_reviewers | map(.login) | join(","))
+      ] | @csv' pulls.json >> hackforla_PRs.csv
       # Commit and push the generated CSV to the repository
       - name: Commit and push CSV
         run: |
           git config user.name "Automated"
           git config user.email "actions@users.noreply.github.com"
-          git add -f hackforla_pull_requests.csv
+          git add -f hackforla_PRs.csv
           timestamp=$(date -u)
           git commit -m "Latest data: ${timestamp}" || exit 0
           git push --force origin HEAD:refs/heads/n2020h-issues-to-csv

From 925e5ffe225e858e910c032fa6ab0af46048d197 Mon Sep 17 00:00:00 2001
From: n2020h <72112832+n2020h@users.noreply.github.com>
Date: Mon, 29 Jul 2024 12:41:47 -0400
Subject: [PATCH 24/49] Debug & breakpoint pull_requests_to_csv.yml

---
 .github/workflows/pull_requests_to_csv.yml | 72 ++++++++++++++++++----
 1 file changed, 61 insertions(+), 11 deletions(-)

diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml
index e25b880..8d92fa3 100644
--- a/.github/workflows/pull_requests_to_csv.yml
+++ b/.github/workflows/pull_requests_to_csv.yml
@@ -11,7 +11,6 @@ on:
   # types: [opened, closed, reopened]
   # branches:
   #   - n2020h-issues-to-csv
-
 jobs:
   list-pull-requests:
     runs-on: ubuntu-latest
@@ -21,17 +20,32 @@ jobs:
      - name: Checkout repository
        uses: actions/checkout@v3

+###------------------------------------------------------#######
      # Set up Node.js to use jq command
+      # trying this
+      # trying this!!! 
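+      # Reviewer note (an assumption): jq ships preinstalled on GitHub's
+      # ubuntu-latest runners, so the Node.js setup step below is not actually
+      # needed for the jq pipeline. Also, jq has no `capture_all` builtin; a
+      # standard way to collect every "#123" reference from a PR body is
+      #   [.body // "" | scan("#\\d+")] | join(",")
+      # which may be why the Linked Issues column was dropped in the previous
+      # commit.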
+ - name: Set up Node.js + uses: actions/setup-node@v3 + with: + node-version: '20' + # Fetch pull requests data and save it to pulls.json + - name: Fetch pull requests data + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + curl -H "Authorization: token $GITHUB_TOKEN" \ + -H "Accept: application/vnd.github.v3+json" \ + "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&per_page=100" \ + -o pulls.json + + # Check the content of pulls.json for debugging + - name: Display pulls.json content + run: cat pulls.json # Generate pull requests CSV - name: Generate pull requests CSV run: | - run: | - echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers" > hackforla_PRs.csv + echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers" > my_org_PRs.csv jq -r '.[] | [ .number, .title, @@ -43,16 +57,52 @@ jobs: (.labels | map(.name) | join(",")), (.assignees | map(.login) | join(",")), (.requested_reviewers | map(.login) | join(",")) - ] | @csv' pulls.json >> hackforla_PRs.csv + ] | @csv' pulls.json >> my_org_PRs.csv + + # Check the content of my_org_PRs.csv for debugging + - name: Display my_org_PRs.csv content + run: cat my_org_PRs.csv # Commit and push the generated CSV to the repository - name: Commit and push CSV run: | git config user.name "Automated" git config user.email "actions@users.noreply.github.com" - git add -f hackforla_PRs.csv + git add -f my_org_PRs.csv timestamp=$(date -u) - git commit -m "Latest data: ${timestamp}" || exit 0 - git push --force origin HEAD:refs/heads/n2020h-issues-to-csv + git commit -m "Latest pull requests data: ${timestamp}" || exit 0 + git push --force origin HEAD:refs/heads/n2020h-prs-to-csv env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + +####-------------------------------------------------####### + + # # Generate pull requests CSV + # - name: Generate pull requests CSV + # run: | + # run: | + # echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers" > hackforla_PRs.csv + # jq -r '.[] | [ + # .number, + # .title, + # .body, + # .user.login, + # .state, + # .commits, + # .changed_files, + # (.labels | map(.name) | join(",")), + # (.assignees | map(.login) | join(",")), + # (.requested_reviewers | map(.login) | join(",")) + # ] | @csv' pulls.json >> hackforla_PRs.csv + + # # Commit and push the generated CSV to the repository + # - name: Commit and push CSV + # run: | + # git config user.name "Automated" + # git config user.email "actions@users.noreply.github.com" + # git add -f hackforla_PRs.csv + # timestamp=$(date -u) + # git commit -m "Latest data: ${timestamp}" || exit 0 + # git push --force origin HEAD:refs/heads/n2020h-issues-to-csv + # env: + # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From d730c20f3832cdfcb9d9e6238f4c7e5cfdd77d8b Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 29 Jul 2024 13:07:02 -0400 Subject: [PATCH 25/49] Removed dependabot details.yml --- .github/workflows/pull_requests_to_csv.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index 8d92fa3..2bb2d9c 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -44,9 +44,9 @@ jobs: # Generate pull requests CSV - name: Generate pull requests CSV - run: | + | echo "PR 
Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers" > my_org_PRs.csv - jq -r '.[] | [ + jq -r '.[] | select(.user.login != "dependabot[bot]") | [ .number, .title, .body, From 488b0bd0c9f93a85f4b024505c015c14f65a0ed2 Mon Sep 17 00:00:00 2001 From: Automated Date: Mon, 29 Jul 2024 17:07:19 +0000 Subject: [PATCH 26/49] Latest data: Mon Jul 29 17:07:19 UTC 2024 --- issues.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/issues.csv b/issues.csv index 812d233..b4441d7 100644 --- a/issues.csv +++ b/issues.csv @@ -14,7 +14,7 @@ Issue Number,Title,Labels,State,User Login,User ID,Assignee Login,Assignee ID 181,"CoP: Data Science: Affordable Housing Access to Food Pantries","role: data science,epic,size: 3pt,project: EDA","closed","akhaleghi",7635911,"MDivyaPrakash",22434989 180,"City of Los Angeles Arrests","role: data science,epic,size: 3pt,project: EDA","open","akhaleghi",7635911,"dolla24",13824693 179,"CoP: Data Science: City of Los Angeles Evictions","role: data science,epic,size: 3pt,project: EDA","closed","akhaleghi",7635911,"rahul897",5197842 -178,"CoP: Data Science: Find data sources through the LA Controller's office","documentation,good first issue,role: data analysis,size: 1pt,project: missing","open","akhaleghi",7635911,"FragariaChemist",103977933 +178,"CoP: Data Science: Find data sources through the LA Controller's office","documentation,good first issue,role: data analysis,size: 1pt,project: missing","open","akhaleghi",7635911,"RomyPatel",54936257 177,"Create 311 data CSV files that can be accessed through a Jupyter notebook","role: data science,project: 311 Data,size: 3pt,feature: project management","closed","akhaleghi",7635911,"mru-hub",105573589 170,"California Grants Portal Data Set EDA","role: data analysis,project duration: ongoing,size: 1pt,project: EDA","closed","akhaleghi",7635911,"jossus657",97187879 169,"Potential Projects: California Grants Portal","role: data science,epic,size: epic,dependency,project: EDA","closed","akhaleghi",7635911,"mihikasahani",102847564 From 57aad64b8e56942be7efecd1883f6d8928bbf508 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 29 Jul 2024 13:13:28 -0400 Subject: [PATCH 27/49] Update pull_requests_to_csv.yml --- .github/workflows/pull_requests_to_csv.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index 2bb2d9c..9275993 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -71,7 +71,7 @@ jobs: git add -f my_org_PRs.csv timestamp=$(date -u) git commit -m "Latest pull requests data: ${timestamp}" || exit 0 - git push --force origin HEAD:refs/heads/n2020h-prs-to-csv + git push --force origin HEAD:refs/heads/n2020h-issues-to-csv env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From c933e9253891b4e7b8dba5debe323a872bef7c82 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 29 Jul 2024 18:39:59 -0400 Subject: [PATCH 28/49] Update pull_requests_to_csv.yml --- .github/workflows/pull_requests_to_csv.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index 9275993..389a0ab 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -44,7 +44,7 @@ jobs: # Generate pull requests CSV - 
name: Generate pull requests CSV - | + run: | echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers" > my_org_PRs.csv jq -r '.[] | select(.user.login != "dependabot[bot]") | [ .number, From 5df86a4fc7fe1b74e8be32395d4fed40119d48f0 Mon Sep 17 00:00:00 2001 From: Automated Date: Mon, 29 Jul 2024 22:40:16 +0000 Subject: [PATCH 29/49] Latest pull requests data: Mon Jul 29 22:40:16 UTC 2024 --- my_org_PRs.csv | 154 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 154 insertions(+) create mode 100644 my_org_PRs.csv diff --git a/my_org_PRs.csv b/my_org_PRs.csv new file mode 100644 index 0000000..4c2fa8f --- /dev/null +++ b/my_org_PRs.csv @@ -0,0 +1,154 @@ +PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers +205,"Added data loading and cleaning Jupyter notebook","- Initial implementation of data loading/ saving and cleaning. +- Improvements planned for future versions.","mru-hub","closed",,,"","","" +201,"feat: City of Los Angeles Evictions #179","This commit introduces basic EDA on the LA evictions data, It introduces explode/splitting of Just Cause column for effective analysis + +starting #179 ","rahul897","open",,,"","","" +199,"177 create 311 data csv files that can be accessed through a jupyter notebook","Cleaning rules document from the 311-data","mru-hub","closed",,,"","","salice" +197,"N2020h issues to csv",,"n2020h","open",,,"","","" +196,"186 Adding Crime Data",,"dolla24","closed",,,"","dolla24","" +195,"fix name of image to work with Windows",,"salice","closed",,,"","","" +192,"Create issues-to-csv.yml","testing feature branch with draft pull request","n2020h","closed",,,"","","" +188,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Partially fixes https://github.com/hackforla/website/issues/6139 + +### What changes did you make? +- Adding Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo (commit: https://github.com/hackforla/access-the-data/commit/62609601277509b0473bbf8f6dbf1ef735430c84) to this repo + +### Why did you make the changes? +- Requested by Bonnie per [this comment](https://github.com/hackforla/website/issues/5127#issuecomment-1776306588) +- AJ Price, Access the Data's PM, didn't want Jupyter Notebook to show up as a Language for the project, so Bonnie recommended moving the Workshop-Feedback_Bigram_Analysis.ipynb file to the Data Science repo. + +### Additional Notes +- I recommend using rebase merging to preserve who authored the file. +- To see examples of the commit history of the various methods of merging, see these branches in my forked repo: + - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended + - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits + - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","" +187,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Fixes N/A - + +### What changes did you make? +- Adding Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo (commit: https://github.com/hackforla/access-the-data/commit/62609601277509b0473bbf8f6dbf1ef735430c84) to this repo + +### Why did you make the changes? 
+- Requested by Bonnie per [this comment](https://github.com/hackforla/website/issues/5127#issuecomment-1776306588) +- AJ Price, Access the Data's PM, didn't want Jupyter Notebook to show up as a Language for the project, so Bonnie recommended moving the Workshop-Feedback_Bigram_Analysis.ipynb file to the Data Science repo. + +### Additional Notes +- I recommend using rebase merging to preserve who authored the file. +- To see examples of the commit history of the various methods of merging, see these branches in my forked repo: + - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended + - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits + - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","" +186,"Lalla la crime analysis 2",,"Lalla22","open",,,"","","salice" +185,"Create README.md",,"Lalla22","closed",,,"","","" +184,"git c",,"Lalla22","closed",,,"","","" +168,"Updating label analysis jupyter notebook with co-occurrence analysis",,"rbianchetti","open",,,"","","" +167,"Merge pull request #164 from hackforla/160-survey-repo-labels","A python script that gets all the labels used in open issues across a…","codemamma","open",,,"","","" +164,"A python script that gets all the labels used in open issues across a…","…n organization(s) via GitHub API","rbianchetti","closed",,,"","","" +159,"Linking latest pedestrian safety report presentation","This replaces a previous powerpoint presentation stored in the repo with links to the current report and presentation saved in Hack for LA's shared google drive.","henrykaplan","open",,,"","","akhaleghi" +158,"Data Science logo links and image files","The latest version of the Hack for LA data science logo is being worked on in Google Drive. I've updated the directory in the git repo with links to the latest and with png and svg versions of the latest logo files.","henrykaplan","closed",,,"","","akhaleghi" +139,"Removed spaces from all filenames in pedestrian data directory","Changed filenames of all output files and graphics. No changes to file contents — these are final graphics and reports, and aren't aren't referenced by file path anywhere.","henrykaplan","closed",,,"","","" +132,"Pedestrian safety","Added readme documentation for downloading and organizing data, running Jupyter notebooks, and reorganized analysis files.","henrykaplan","closed",,,"","","" +123,"Create a new issue template for epics",,"akhaleghi","closed",,,"","","" +122,"Pedestrian safety draft visualizations and presentation","Further simplified and clarified maps and traffic accident charts, added these new charts and related discussion to the presentation file.","henrykaplan","closed",,,"","","AlbertUlysses" +117,"Pedestrian safety","- Add optional parameter to mapillary wrapper client to allow data requests of recent data only, to quickly update existing datasets. +- Better maps and visualizations. +- Use DBScan clustering algorithm to find clusters of traffic accidents (simplest way to roughly group accidents at specific intersections, as the accident data is mostly based around intersections). 
+- Updated report ppt for August 5 presentation to data science group","henrykaplan","closed",,,"","","" +116,"webscraping folder initial commit","[Edited] + +This PR is a stand alone folder added to the 311-data folder. It contains a webscraping script for the tech stacks of each NC survey. See issue [44](https://github.com/hackforla/data-science/issues/44). The readme contains a link to the google sheets table of the tech used for each website.","rajindermavi","closed",,,"","","" +115,"docs","updated readme to include sample queries that LAANE is interested in doing against the database","AlbertUlysses","closed",,,"","","" +114,"docs","Update README/todos","AlbertUlysses","closed",,,"","","" +113,"docs","Updated misc docs","AlbertUlysses","closed",,,"","","" +111,"Docs!","Updated docs like readme and add extra notes to some other files","AlbertUlysses","closed",,,"","","" +105,"debug","debugged some code that was entering incorrect addresses","AlbertUlysses","closed",,,"","","" +104,"docs/clean up","updated README and removed some old files.","AlbertUlysses","closed",,,"","","" +103,"feat: assessor script","Last script, passes tests refactored a bit","AlbertUlysses","closed",,,"","","" +102,"docs","Update Readme +removed a lot of personal notes aimed for me during script writing","AlbertUlysses","closed",,,"","","" +101,"debug categorically inelligible","debug categorically inelligibl data","AlbertUlysses","closed",,,"","","" +100,"New scripts + debug","added some code that debugs the ofs and luxly datasets. +Added a new script that handles warnings and citations.","AlbertUlysses","closed",,,"","","" +99,"bugfix","fixed bugs across 4 files that dealth with addresses that returned 0 for zipcode instead of a 5 number digit","AlbertUlysses","closed",,,"","","" +98,"feat: new script for warning","This script is for warning files that don't have only addresses and dates.","AlbertUlysses","closed",,,"","","" +97,"Two commits","First commit fixes the warning script file. +The second commit adds an extra line describe what the script is for. +","AlbertUlysses","closed",,,"","","" +96,"feat: new processesing script","script for airbnb reviews is complete.","AlbertUlysses","closed",,,"","","" +95,"feat: airbnb dataset script","airbnb script that uploads listings and host information ","AlbertUlysses","closed",,,"","","" +93,"debug one fine stay scripts","debug one fine stay scripts for address2 errors","AlbertUlysses","closed",,,"","","" +92,"debug","debugged the data from one fine stay.","AlbertUlysses","closed",,,"","","" +91,"debug","Did some debugging for luxly platform script","AlbertUlysses","closed",,,"","","" +90,"fixbug in hsodenials","hsodenials bugs are remved, hsodenials script is good to go.","AlbertUlysses","closed",,,"","","" +89,"debug","Debugged the hso_registrant enteries.","AlbertUlysses","closed",,,"","","" +88,"bugfix: fixed exempt script","fixed exempt table to match the rest of the database","AlbertUlysses","closed",,,"","","" +87,"fixbug: fixed bug in categorically inelligible","fixed categoically ineligible bug","AlbertUlysses","closed",,,"","","" +86,"bug fix: fixed bug for complaints data","fixed complaints data to not have nulls and to not allow any state that don't use two letter abbreviation. 
","AlbertUlysses","closed",,,"","","" +85,"refactor/debug: debugging each script","Refactored tot script to make sure it is uploading to sqlite correctly.","AlbertUlysses","closed",,,"","","" +84,"re-refactor normalize address","normalize address wrapper returns none as default again because that's how the library handle it, shouldn't return two different options.","AlbertUlysses","closed",,,"","","" +83,"refactored","refactoring some transformation files to fix bug","AlbertUlysses","closed",,,"","","" +82,"feat: hso_registrant script","HSO_Registrant script is complete. +","AlbertUlysses","closed",,,"","","" +81,"refactor: no code change files moved","I moved some files around to better reflect where they belong. +For exapmle, assessor table file will be move to processingscripts folder because the code is unique to the assessor dataset. +However, normalize_address_wrapper will stay in the transformations folder because multiple scripts use this. +Asssesor file (and builds) will now have the custom code and later include the processing code as well.","AlbertUlysses","closed",,,"","","" +79,"refactor: add a new file to handle multiple files","Remove old code that does multiple files for a job and include a new module that handles that.","AlbertUlysses","closed",,,"","","" +78,"feat: new script","Script that enters one fine stay data into platform table but has a different column layout then the other one fine stay sheets.","AlbertUlysses","closed",,,"","","" +77,"refactor: include steps for entire folder","Added some new code that helps with multiple fies in a folder. This should be moved into it's own module in the future.","AlbertUlysses","closed",,,"","","" +76,"refactor: move scripts to new folder","move all scripting files to processingscript folder and leave helper functions in the transformations folder.","AlbertUlysses","closed",,,"","","" +75,"feat: platform one fine stay","Adding the custom script for uploading one fine stay data into platform database.","AlbertUlysses","closed",,,"","","" +74,"refactor: refactor platform luxly","refactor the platform luxly file to make it more readable and closer allign with wemake style guide.","AlbertUlysses","closed",,,"","","" +73,"docs: update license","updated the license to reflect Hack For LA's best practices. ","AlbertUlysses","closed",,,"","","" +72,"feat: platform table luxly files","Completed script for loading luxly files into the platform database.","AlbertUlysses","closed",,,"","","" +71,"feat: add hso revoked table","The script for inserting HSO Revoked data is complete.","AlbertUlysses","closed",,,"","","" +70,"feat: exempt processing script","The processing script for the ""exempt"" dataset is done.","AlbertUlysses","closed",,,"","","" +69,"feat: noncompliant script","Add script that inserts noncompliant data into database.","AlbertUlysses","closed",,,"","","" +68,"feat: categorically inelligible","added a new script for the categorically ineligible dataset.","AlbertUlysses","closed",,,"","","" +67,"feat: add complaints script","Added a new script that inputs Complaints data into the database. +","AlbertUlysses","closed",,,"","","" +64,"feat: tot insert file","New insert script complete. +","AlbertUlysses","closed",,,"","","" +63,"feat: add new abstraction function","refactored some code and created a new abstraction function for inserting data. 
","AlbertUlysses","closed",,,"","","" +62,"quick fix","removed absolute path : quick fix","AlbertUlysses","closed",,,"","","" +61,"feat: hso_denials insert function","Created a new file for hso_denial table, Added test for the custom functions. Added the code to insert the hso_denials data into the db. +1st dataset that's completely done +Need to go back to refactor the code because it's ugly","AlbertUlysses","closed",,,"","","" +58,"docs: adding COPYING file","removed some old jupyter files and added a license file which is called COPYING per GNU's suggestion.","AlbertUlysses","closed",,,"","","" +57,"Feat: add Airbnb tables","added airbnb tables and updated SQL Alchemy relationships.","AlbertUlysses","closed",,,"","","" +56,"feat: add SQL Alchemy models/Database","added SQL Alchemy Models and the Database code. ","AlbertUlysses","closed",,,"","","" +55,"docs: rewrote some docstrings etc","updated some doc strings in files to better reflect the intent.","AlbertUlysses","closed",,,"","","" +54,"feat: new code for date entries","I added new code and tests for formatting date columns. ","AlbertUlysses","closed",,,"","","" +53,"refactor: update assessor table","Did some refactoring that addressed these issues: + +- Rewrote the code to match the WeMake style guide. +- The functions are faster, and the memory is more efficient by ten times on average. +- The functions are pure now, with no added consequence to the DataFrames or Series passed into them. +All tests are still passing.","AlbertUlysses","closed",,,"","","" +52,"docs: re-wrote some of the normalize_address information","Docs improvement for normailze_address","AlbertUlysses","closed",,,"","","" +51,"refactored transformation scripts","refactored a lot and added extra notes in read me ","AlbertUlysses","closed",,,"","","" +50,"tot update/completion","ToT table is done. Refactored it a bit - still passing test +","AlbertUlysses","closed",,,"","","" +49,"work on tot table","WIP - started work on TOT table and tests I will finish in tomorrow and start on a new table.","AlbertUlysses","closed",,,"","","" +48,"Add expemtion table transformations","I added the transformation functions for exempt table with their tests, renamed the main folder.","AlbertUlysses","closed",,,"","","" +47,"updating folder layout and adding transformation scripts with tests","introduces the first of a few scripts to the project along with some tests. Needs refactoring but all tests are passing. + +","AlbertUlysses","closed",,,"","","" +42,"Update Pedestrian Data folder","Pedestrian Data folder including the relevant notebooks and visualizations.","dplem","closed",,,"","","" +41,"Push request redo preprocessing files","","KarinaLopez19","closed",,,"","","AlbertUlysses" +40,"Extend Mapillary API wrapper","These are new features for the python wrapper for Mapillary's API to allow it to: + +- Make requests of other layers of Mapillary data beyond traffic signs + +- Store photo image IDs associated with each item on map + +- Parse Mapillary's latitude/longitude string format + +- Give feedback while downloading data and timeout after 5 minutes, to prevent downloads from failing silently","henrykaplan","closed",,,"","henrykaplan","dplem,AlbertUlysses" +39,"add bbox function","Created bbox function to help create a boundary box for cleaning data. Added one test to ensure any refactoring in the future still returns the correct expected results. 
","AlbertUlysses","closed",,,"","","" +38,"this is a test","","KarinaLopez19","closed",,,"","","" +37,"add new folder with new functions to help with clean up","Adding the folder for airbnb listing and first function for helper functions that will be used for cleaning the data +","AlbertUlysses","closed",,,"","","" +35,"clean up jupyter notebook","cleaned up the jupyter notebook to make it more user friendly","AlbertUlysses","closed",,,"","","" +34,"chore: clean up modules","used black to clean up modules and removed commented code that wasn't being used.","AlbertUlysses","closed",,,"","","" From 25c52565957786fba1cf8f419aa18fa8dc0c390b Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 29 Jul 2024 18:47:37 -0400 Subject: [PATCH 30/49] Update pull_requests_to_csv.yml --- .github/workflows/pull_requests_to_csv.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index 389a0ab..d7b39dd 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -45,7 +45,7 @@ jobs: # Generate pull requests CSV - name: Generate pull requests CSV run: | - echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers" > my_org_PRs.csv + echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers" > pull_requests.csv jq -r '.[] | select(.user.login != "dependabot[bot]") | [ .number, .title, @@ -57,18 +57,18 @@ jobs: (.labels | map(.name) | join(",")), (.assignees | map(.login) | join(",")), (.requested_reviewers | map(.login) | join(",")) - ] | @csv' pulls.json >> my_org_PRs.csv + ] | @csv' pulls.json >> pull_requests.csv - # Check the content of my_org_PRs.csv for debugging - - name: Display my_org_PRs.csv content - run: cat my_org_PRs.csv + # Check the content of pull_requests.csv for debugging + - name: Display pull_requests.csv content + run: cat pull_requests.csv # Commit and push the generated CSV to the repository - name: Commit and push CSV run: | git config user.name "Automated" git config user.email "actions@users.noreply.github.com" - git add -f my_org_PRs.csv + git add -f pull_requests.csv timestamp=$(date -u) git commit -m "Latest pull requests data: ${timestamp}" || exit 0 git push --force origin HEAD:refs/heads/n2020h-issues-to-csv From c6fc3d14b01e5ae11aa1953b7e118532cb95215a Mon Sep 17 00:00:00 2001 From: Automated Date: Mon, 29 Jul 2024 22:47:53 +0000 Subject: [PATCH 31/49] Latest pull requests data: Mon Jul 29 22:47:53 UTC 2024 --- pull_requests.csv | 154 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 154 insertions(+) create mode 100644 pull_requests.csv diff --git a/pull_requests.csv b/pull_requests.csv new file mode 100644 index 0000000..4c2fa8f --- /dev/null +++ b/pull_requests.csv @@ -0,0 +1,154 @@ +PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers +205,"Added data loading and cleaning Jupyter notebook","- Initial implementation of data loading/ saving and cleaning. 
+- Improvements planned for future versions.","mru-hub","closed",,,"","","" +201,"feat: City of Los Angeles Evictions #179","This commit introduces basic EDA on the LA evictions data, It introduces explode/splitting of Just Cause column for effective analysis + +starting #179 ","rahul897","open",,,"","","" +199,"177 create 311 data csv files that can be accessed through a jupyter notebook","Cleaning rules document from the 311-data","mru-hub","closed",,,"","","salice" +197,"N2020h issues to csv",,"n2020h","open",,,"","","" +196,"186 Adding Crime Data",,"dolla24","closed",,,"","dolla24","" +195,"fix name of image to work with Windows",,"salice","closed",,,"","","" +192,"Create issues-to-csv.yml","testing feature branch with draft pull request","n2020h","closed",,,"","","" +188,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Partially fixes https://github.com/hackforla/website/issues/6139 + +### What changes did you make? +- Adding Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo (commit: https://github.com/hackforla/access-the-data/commit/62609601277509b0473bbf8f6dbf1ef735430c84) to this repo + +### Why did you make the changes? +- Requested by Bonnie per [this comment](https://github.com/hackforla/website/issues/5127#issuecomment-1776306588) +- AJ Price, Access the Data's PM, didn't want Jupyter Notebook to show up as a Language for the project, so Bonnie recommended moving the Workshop-Feedback_Bigram_Analysis.ipynb file to the Data Science repo. + +### Additional Notes +- I recommend using rebase merging to preserve who authored the file. +- To see examples of the commit history of the various methods of merging, see these branches in my forked repo: + - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended + - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits + - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","" +187,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Fixes N/A - + +### What changes did you make? +- Adding Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo (commit: https://github.com/hackforla/access-the-data/commit/62609601277509b0473bbf8f6dbf1ef735430c84) to this repo + +### Why did you make the changes? +- Requested by Bonnie per [this comment](https://github.com/hackforla/website/issues/5127#issuecomment-1776306588) +- AJ Price, Access the Data's PM, didn't want Jupyter Notebook to show up as a Language for the project, so Bonnie recommended moving the Workshop-Feedback_Bigram_Analysis.ipynb file to the Data Science repo. + +### Additional Notes +- I recommend using rebase merging to preserve who authored the file. 
+- To see examples of the commit history of the various methods of merging, see these branches in my forked repo: + - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended + - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits + - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","" +186,"Lalla la crime analysis 2",,"Lalla22","open",,,"","","salice" +185,"Create README.md",,"Lalla22","closed",,,"","","" +184,"git c",,"Lalla22","closed",,,"","","" +168,"Updating label analysis jupyter notebook with co-occurrence analysis",,"rbianchetti","open",,,"","","" +167,"Merge pull request #164 from hackforla/160-survey-repo-labels","A python script that gets all the labels used in open issues across a…","codemamma","open",,,"","","" +164,"A python script that gets all the labels used in open issues across a…","…n organization(s) via GitHub API","rbianchetti","closed",,,"","","" +159,"Linking latest pedestrian safety report presentation","This replaces a previous powerpoint presentation stored in the repo with links to the current report and presentation saved in Hack for LA's shared google drive.","henrykaplan","open",,,"","","akhaleghi" +158,"Data Science logo links and image files","The latest version of the Hack for LA data science logo is being worked on in Google Drive. I've updated the directory in the git repo with links to the latest and with png and svg versions of the latest logo files.","henrykaplan","closed",,,"","","akhaleghi" +139,"Removed spaces from all filenames in pedestrian data directory","Changed filenames of all output files and graphics. No changes to file contents — these are final graphics and reports, and aren't aren't referenced by file path anywhere.","henrykaplan","closed",,,"","","" +132,"Pedestrian safety","Added readme documentation for downloading and organizing data, running Jupyter notebooks, and reorganized analysis files.","henrykaplan","closed",,,"","","" +123,"Create a new issue template for epics",,"akhaleghi","closed",,,"","","" +122,"Pedestrian safety draft visualizations and presentation","Further simplified and clarified maps and traffic accident charts, added these new charts and related discussion to the presentation file.","henrykaplan","closed",,,"","","AlbertUlysses" +117,"Pedestrian safety","- Add optional parameter to mapillary wrapper client to allow data requests of recent data only, to quickly update existing datasets. +- Better maps and visualizations. +- Use DBScan clustering algorithm to find clusters of traffic accidents (simplest way to roughly group accidents at specific intersections, as the accident data is mostly based around intersections). +- Updated report ppt for August 5 presentation to data science group","henrykaplan","closed",,,"","","" +116,"webscraping folder initial commit","[Edited] + +This PR is a stand alone folder added to the 311-data folder. It contains a webscraping script for the tech stacks of each NC survey. See issue [44](https://github.com/hackforla/data-science/issues/44). 
The readme contains a link to the google sheets table of the tech used for each website.","rajindermavi","closed",,,"","","" +115,"docs","updated readme to include sample queries that LAANE is interested in doing against the database","AlbertUlysses","closed",,,"","","" +114,"docs","Update README/todos","AlbertUlysses","closed",,,"","","" +113,"docs","Updated misc docs","AlbertUlysses","closed",,,"","","" +111,"Docs!","Updated docs like readme and add extra notes to some other files","AlbertUlysses","closed",,,"","","" +105,"debug","debugged some code that was entering incorrect addresses","AlbertUlysses","closed",,,"","","" +104,"docs/clean up","updated README and removed some old files.","AlbertUlysses","closed",,,"","","" +103,"feat: assessor script","Last script, passes tests refactored a bit","AlbertUlysses","closed",,,"","","" +102,"docs","Update Readme +removed a lot of personal notes aimed for me during script writing","AlbertUlysses","closed",,,"","","" +101,"debug categorically inelligible","debug categorically inelligibl data","AlbertUlysses","closed",,,"","","" +100,"New scripts + debug","added some code that debugs the ofs and luxly datasets. +Added a new script that handles warnings and citations.","AlbertUlysses","closed",,,"","","" +99,"bugfix","fixed bugs across 4 files that dealth with addresses that returned 0 for zipcode instead of a 5 number digit","AlbertUlysses","closed",,,"","","" +98,"feat: new script for warning","This script is for warning files that don't have only addresses and dates.","AlbertUlysses","closed",,,"","","" +97,"Two commits","First commit fixes the warning script file. +The second commit adds an extra line describe what the script is for. +","AlbertUlysses","closed",,,"","","" +96,"feat: new processesing script","script for airbnb reviews is complete.","AlbertUlysses","closed",,,"","","" +95,"feat: airbnb dataset script","airbnb script that uploads listings and host information ","AlbertUlysses","closed",,,"","","" +93,"debug one fine stay scripts","debug one fine stay scripts for address2 errors","AlbertUlysses","closed",,,"","","" +92,"debug","debugged the data from one fine stay.","AlbertUlysses","closed",,,"","","" +91,"debug","Did some debugging for luxly platform script","AlbertUlysses","closed",,,"","","" +90,"fixbug in hsodenials","hsodenials bugs are remved, hsodenials script is good to go.","AlbertUlysses","closed",,,"","","" +89,"debug","Debugged the hso_registrant enteries.","AlbertUlysses","closed",,,"","","" +88,"bugfix: fixed exempt script","fixed exempt table to match the rest of the database","AlbertUlysses","closed",,,"","","" +87,"fixbug: fixed bug in categorically inelligible","fixed categoically ineligible bug","AlbertUlysses","closed",,,"","","" +86,"bug fix: fixed bug for complaints data","fixed complaints data to not have nulls and to not allow any state that don't use two letter abbreviation. ","AlbertUlysses","closed",,,"","","" +85,"refactor/debug: debugging each script","Refactored tot script to make sure it is uploading to sqlite correctly.","AlbertUlysses","closed",,,"","","" +84,"re-refactor normalize address","normalize address wrapper returns none as default again because that's how the library handle it, shouldn't return two different options.","AlbertUlysses","closed",,,"","","" +83,"refactored","refactoring some transformation files to fix bug","AlbertUlysses","closed",,,"","","" +82,"feat: hso_registrant script","HSO_Registrant script is complete. 
+","AlbertUlysses","closed",,,"","","" +81,"refactor: no code change files moved","I moved some files around to better reflect where they belong. +For exapmle, assessor table file will be move to processingscripts folder because the code is unique to the assessor dataset. +However, normalize_address_wrapper will stay in the transformations folder because multiple scripts use this. +Asssesor file (and builds) will now have the custom code and later include the processing code as well.","AlbertUlysses","closed",,,"","","" +79,"refactor: add a new file to handle multiple files","Remove old code that does multiple files for a job and include a new module that handles that.","AlbertUlysses","closed",,,"","","" +78,"feat: new script","Script that enters one fine stay data into platform table but has a different column layout then the other one fine stay sheets.","AlbertUlysses","closed",,,"","","" +77,"refactor: include steps for entire folder","Added some new code that helps with multiple fies in a folder. This should be moved into it's own module in the future.","AlbertUlysses","closed",,,"","","" +76,"refactor: move scripts to new folder","move all scripting files to processingscript folder and leave helper functions in the transformations folder.","AlbertUlysses","closed",,,"","","" +75,"feat: platform one fine stay","Adding the custom script for uploading one fine stay data into platform database.","AlbertUlysses","closed",,,"","","" +74,"refactor: refactor platform luxly","refactor the platform luxly file to make it more readable and closer allign with wemake style guide.","AlbertUlysses","closed",,,"","","" +73,"docs: update license","updated the license to reflect Hack For LA's best practices. ","AlbertUlysses","closed",,,"","","" +72,"feat: platform table luxly files","Completed script for loading luxly files into the platform database.","AlbertUlysses","closed",,,"","","" +71,"feat: add hso revoked table","The script for inserting HSO Revoked data is complete.","AlbertUlysses","closed",,,"","","" +70,"feat: exempt processing script","The processing script for the ""exempt"" dataset is done.","AlbertUlysses","closed",,,"","","" +69,"feat: noncompliant script","Add script that inserts noncompliant data into database.","AlbertUlysses","closed",,,"","","" +68,"feat: categorically inelligible","added a new script for the categorically ineligible dataset.","AlbertUlysses","closed",,,"","","" +67,"feat: add complaints script","Added a new script that inputs Complaints data into the database. +","AlbertUlysses","closed",,,"","","" +64,"feat: tot insert file","New insert script complete. +","AlbertUlysses","closed",,,"","","" +63,"feat: add new abstraction function","refactored some code and created a new abstraction function for inserting data. ","AlbertUlysses","closed",,,"","","" +62,"quick fix","removed absolute path : quick fix","AlbertUlysses","closed",,,"","","" +61,"feat: hso_denials insert function","Created a new file for hso_denial table, Added test for the custom functions. Added the code to insert the hso_denials data into the db. 
+1st dataset that's completely done +Need to go back to refactor the code because it's ugly","AlbertUlysses","closed",,,"","","" +58,"docs: adding COPYING file","removed some old jupyter files and added a license file which is called COPYING per GNU's suggestion.","AlbertUlysses","closed",,,"","","" +57,"Feat: add Airbnb tables","added airbnb tables and updated SQL Alchemy relationships.","AlbertUlysses","closed",,,"","","" +56,"feat: add SQL Alchemy models/Database","added SQL Alchemy Models and the Database code. ","AlbertUlysses","closed",,,"","","" +55,"docs: rewrote some docstrings etc","updated some doc strings in files to better reflect the intent.","AlbertUlysses","closed",,,"","","" +54,"feat: new code for date entries","I added new code and tests for formatting date columns. ","AlbertUlysses","closed",,,"","","" +53,"refactor: update assessor table","Did some refactoring that addressed these issues: + +- Rewrote the code to match the WeMake style guide. +- The functions are faster, and the memory is more efficient by ten times on average. +- The functions are pure now, with no added consequence to the DataFrames or Series passed into them. +All tests are still passing.","AlbertUlysses","closed",,,"","","" +52,"docs: re-wrote some of the normalize_address information","Docs improvement for normailze_address","AlbertUlysses","closed",,,"","","" +51,"refactored transformation scripts","refactored a lot and added extra notes in read me ","AlbertUlysses","closed",,,"","","" +50,"tot update/completion","ToT table is done. Refactored it a bit - still passing test +","AlbertUlysses","closed",,,"","","" +49,"work on tot table","WIP - started work on TOT table and tests I will finish in tomorrow and start on a new table.","AlbertUlysses","closed",,,"","","" +48,"Add expemtion table transformations","I added the transformation functions for exempt table with their tests, renamed the main folder.","AlbertUlysses","closed",,,"","","" +47,"updating folder layout and adding transformation scripts with tests","introduces the first of a few scripts to the project along with some tests. Needs refactoring but all tests are passing. + +","AlbertUlysses","closed",,,"","","" +42,"Update Pedestrian Data folder","Pedestrian Data folder including the relevant notebooks and visualizations.","dplem","closed",,,"","","" +41,"Push request redo preprocessing files","","KarinaLopez19","closed",,,"","","AlbertUlysses" +40,"Extend Mapillary API wrapper","These are new features for the python wrapper for Mapillary's API to allow it to: + +- Make requests of other layers of Mapillary data beyond traffic signs + +- Store photo image IDs associated with each item on map + +- Parse Mapillary's latitude/longitude string format + +- Give feedback while downloading data and timeout after 5 minutes, to prevent downloads from failing silently","henrykaplan","closed",,,"","henrykaplan","dplem,AlbertUlysses" +39,"add bbox function","Created bbox function to help create a boundary box for cleaning data. Added one test to ensure any refactoring in the future still returns the correct expected results. 
","AlbertUlysses","closed",,,"","","" +38,"this is a test","","KarinaLopez19","closed",,,"","","" +37,"add new folder with new functions to help with clean up","Adding the folder for airbnb listing and first function for helper functions that will be used for cleaning the data +","AlbertUlysses","closed",,,"","","" +35,"clean up jupyter notebook","cleaned up the jupyter notebook to make it more user friendly","AlbertUlysses","closed",,,"","","" +34,"chore: clean up modules","used black to clean up modules and removed commented code that wasn't being used.","AlbertUlysses","closed",,,"","","" From 2f9bcb82cd99c74cfa765f56b824981d129d62bb Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Tue, 6 Aug 2024 21:18:20 -0400 Subject: [PATCH 32/49] Update pull_requests_to_csv.yml --- .github/workflows/pull_requests_to_csv.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index d7b39dd..59eb113 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -22,8 +22,6 @@ jobs: ###------------------------------------------------------####### # Set up Node.js to use jq command - # trying thiss - # trying this!!! - name: Set up Node.js uses: actions/setup-node@v3 with: From 6248bb7f6c28da4e0524ced62de77c613ed817ce Mon Sep 17 00:00:00 2001 From: Automated Date: Wed, 7 Aug 2024 01:18:35 +0000 Subject: [PATCH 33/49] Latest pull requests data: Wed Aug 7 01:18:35 UTC 2024 --- pull_requests.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pull_requests.csv b/pull_requests.csv index 4c2fa8f..2cb569e 100644 --- a/pull_requests.csv +++ b/pull_requests.csv @@ -5,7 +5,7 @@ PR Number,Title,Description,Author,State,Number of Commits,Number of Files Chang starting #179 ","rahul897","open",,,"","","" 199,"177 create 311 data csv files that can be accessed through a jupyter notebook","Cleaning rules document from the 311-data","mru-hub","closed",,,"","","salice" -197,"N2020h issues to csv",,"n2020h","open",,,"","","" +197,"N2020h issues to csv",,"n2020h","open",,,"","n2020h","" 196,"186 Adding Crime Data",,"dolla24","closed",,,"","dolla24","" 195,"fix name of image to work with Windows",,"salice","closed",,,"","","" 192,"Create issues-to-csv.yml","testing feature branch with draft pull request","n2020h","closed",,,"","","" From f93fc37f92267007ebd2c25089a7c161ef3dfbe4 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Tue, 6 Aug 2024 22:34:59 -0400 Subject: [PATCH 34/49] Add Linked Issues.yml Add Linked Issues --- .github/workflows/pull_requests_to_csv.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index 59eb113..f512965 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -43,7 +43,7 @@ jobs: # Generate pull requests CSV - name: Generate pull requests CSV run: | - echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers" > pull_requests.csv + echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers, Linked Issues" > pull_requests.csv jq -r '.[] | select(.user.login != "dependabot[bot]") | [ .number, .title, @@ -54,7 +54,8 @@ jobs: .changed_files, (.labels | map(.name) | join(",")), (.assignees | map(.login) 
| join(",")), - (.requested_reviewers | map(.login) | join(",")) + (.requested_reviewers | map(.login) | join(",")), + (.body | capture_all("#(?\\d+)"; "g") | join(",")) ] | @csv' pulls.json >> pull_requests.csv # Check the content of pull_requests.csv for debugging From ba42f8f2f31f197aa9e6fb4f6d3110128c67686d Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Tue, 6 Aug 2024 22:44:22 -0400 Subject: [PATCH 35/49] Update pull_requests_to_csv.yml Debug pull_requests.csv --- .github/workflows/pull_requests_to_csv.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index f512965..8804261 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -41,6 +41,7 @@ jobs: run: cat pulls.json # Generate pull requests CSV + # (.body | capture_all("#(?\\d+)"; "g") | join(",")) - name: Generate pull requests CSV run: | echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers, Linked Issues" > pull_requests.csv @@ -55,7 +56,7 @@ jobs: (.labels | map(.name) | join(",")), (.assignees | map(.login) | join(",")), (.requested_reviewers | map(.login) | join(",")), - (.body | capture_all("#(?\\d+)"; "g") | join(",")) + (.body | gsub("#";" ") | split(" ") | map(select(startswith("issue_number"))) | join(",")) ] | @csv' pulls.json >> pull_requests.csv # Check the content of pull_requests.csv for debugging From 6f934ba2da3b6740774414f95f7139ebe1b87453 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Tue, 6 Aug 2024 22:47:25 -0400 Subject: [PATCH 36/49] Update pull_requests_to_csv.yml Debug --- .github/workflows/pull_requests_to_csv.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index 8804261..a2a8bce 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -56,7 +56,7 @@ jobs: (.labels | map(.name) | join(",")), (.assignees | map(.login) | join(",")), (.requested_reviewers | map(.login) | join(",")), - (.body | gsub("#";" ") | split(" ") | map(select(startswith("issue_number"))) | join(",")) + (if .body != null then .body | gsub("#";" ") | split(" ") | map(select(startswith("issue_number"))) | join(",") else "" end) ] | @csv' pulls.json >> pull_requests.csv # Check the content of pull_requests.csv for debugging From 7eb0ec444b5760fff6e4d9f89197cac1fc87a522 Mon Sep 17 00:00:00 2001 From: Automated Date: Wed, 7 Aug 2024 02:47:40 +0000 Subject: [PATCH 37/49] Latest pull requests data: Wed Aug 7 02:47:40 UTC 2024 --- pull_requests.csv | 178 +++++++++++++++++++++++----------------------- 1 file changed, 89 insertions(+), 89 deletions(-) diff --git a/pull_requests.csv b/pull_requests.csv index 2cb569e..03e9b2b 100644 --- a/pull_requests.csv +++ b/pull_requests.csv @@ -1,14 +1,14 @@ -PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers +PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers, Linked Issues 205,"Added data loading and cleaning Jupyter notebook","- Initial implementation of data loading/ saving and cleaning. 
-- Improvements planned for future versions.","mru-hub","closed",,,"","","" +- Improvements planned for future versions.","mru-hub","closed",,,"","","","" 201,"feat: City of Los Angeles Evictions #179","This commit introduces basic EDA on the LA evictions data, It introduces explode/splitting of Just Cause column for effective analysis -starting #179 ","rahul897","open",,,"","","" -199,"177 create 311 data csv files that can be accessed through a jupyter notebook","Cleaning rules document from the 311-data","mru-hub","closed",,,"","","salice" -197,"N2020h issues to csv",,"n2020h","open",,,"","n2020h","" -196,"186 Adding Crime Data",,"dolla24","closed",,,"","dolla24","" -195,"fix name of image to work with Windows",,"salice","closed",,,"","","" -192,"Create issues-to-csv.yml","testing feature branch with draft pull request","n2020h","closed",,,"","","" +starting #179 ","rahul897","open",,,"","","","" +199,"177 create 311 data csv files that can be accessed through a jupyter notebook","Cleaning rules document from the 311-data","mru-hub","closed",,,"","","salice","" +197,"N2020h issues to csv",,"n2020h","open",,,"","n2020h","","" +196,"186 Adding Crime Data",,"dolla24","closed",,,"","dolla24","","" +195,"fix name of image to work with Windows",,"salice","closed",,,"","","","" +192,"Create issues-to-csv.yml","testing feature branch with draft pull request","n2020h","closed",,,"","","","" 188,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Partially fixes https://github.com/hackforla/website/issues/6139 ### What changes did you make? @@ -23,7 +23,7 @@ starting #179 ","rahul897","open",,,"","","" - To see examples of the commit history of the various methods of merging, see these branches in my forked repo: - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits - - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","" + - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","","" 187,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Fixes N/A - ### What changes did you make? 
@@ -38,105 +38,105 @@ starting #179 ","rahul897","open",,,"","","" - To see examples of the commit history of the various methods of merging, see these branches in my forked repo: - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits - - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","" -186,"Lalla la crime analysis 2",,"Lalla22","open",,,"","","salice" -185,"Create README.md",,"Lalla22","closed",,,"","","" -184,"git c",,"Lalla22","closed",,,"","","" -168,"Updating label analysis jupyter notebook with co-occurrence analysis",,"rbianchetti","open",,,"","","" -167,"Merge pull request #164 from hackforla/160-survey-repo-labels","A python script that gets all the labels used in open issues across a…","codemamma","open",,,"","","" -164,"A python script that gets all the labels used in open issues across a…","…n organization(s) via GitHub API","rbianchetti","closed",,,"","","" -159,"Linking latest pedestrian safety report presentation","This replaces a previous powerpoint presentation stored in the repo with links to the current report and presentation saved in Hack for LA's shared google drive.","henrykaplan","open",,,"","","akhaleghi" -158,"Data Science logo links and image files","The latest version of the Hack for LA data science logo is being worked on in Google Drive. I've updated the directory in the git repo with links to the latest and with png and svg versions of the latest logo files.","henrykaplan","closed",,,"","","akhaleghi" -139,"Removed spaces from all filenames in pedestrian data directory","Changed filenames of all output files and graphics. 
No changes to file contents — these are final graphics and reports, and aren't aren't referenced by file path anywhere.","henrykaplan","closed",,,"","","" -132,"Pedestrian safety","Added readme documentation for downloading and organizing data, running Jupyter notebooks, and reorganized analysis files.","henrykaplan","closed",,,"","","" -123,"Create a new issue template for epics",,"akhaleghi","closed",,,"","","" -122,"Pedestrian safety draft visualizations and presentation","Further simplified and clarified maps and traffic accident charts, added these new charts and related discussion to the presentation file.","henrykaplan","closed",,,"","","AlbertUlysses" + - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","","" +186,"Lalla la crime analysis 2",,"Lalla22","open",,,"","","salice","" +185,"Create README.md",,"Lalla22","closed",,,"","","","" +184,"git c",,"Lalla22","closed",,,"","","","" +168,"Updating label analysis jupyter notebook with co-occurrence analysis",,"rbianchetti","open",,,"","","","" +167,"Merge pull request #164 from hackforla/160-survey-repo-labels","A python script that gets all the labels used in open issues across a…","codemamma","open",,,"","","","" +164,"A python script that gets all the labels used in open issues across a…","…n organization(s) via GitHub API","rbianchetti","closed",,,"","","","" +159,"Linking latest pedestrian safety report presentation","This replaces a previous powerpoint presentation stored in the repo with links to the current report and presentation saved in Hack for LA's shared google drive.","henrykaplan","open",,,"","","akhaleghi","" +158,"Data Science logo links and image files","The latest version of the Hack for LA data science logo is being worked on in Google Drive. I've updated the directory in the git repo with links to the latest and with png and svg versions of the latest logo files.","henrykaplan","closed",,,"","","akhaleghi","" +139,"Removed spaces from all filenames in pedestrian data directory","Changed filenames of all output files and graphics. No changes to file contents — these are final graphics and reports, and aren't aren't referenced by file path anywhere.","henrykaplan","closed",,,"","","","" +132,"Pedestrian safety","Added readme documentation for downloading and organizing data, running Jupyter notebooks, and reorganized analysis files.","henrykaplan","closed",,,"","","","" +123,"Create a new issue template for epics",,"akhaleghi","closed",,,"","","","" +122,"Pedestrian safety draft visualizations and presentation","Further simplified and clarified maps and traffic accident charts, added these new charts and related discussion to the presentation file.","henrykaplan","closed",,,"","","AlbertUlysses","" 117,"Pedestrian safety","- Add optional parameter to mapillary wrapper client to allow data requests of recent data only, to quickly update existing datasets. - Better maps and visualizations. - Use DBScan clustering algorithm to find clusters of traffic accidents (simplest way to roughly group accidents at specific intersections, as the accident data is mostly based around intersections). 
-- Updated report ppt for August 5 presentation to data science group","henrykaplan","closed",,,"","","" +- Updated report ppt for August 5 presentation to data science group","henrykaplan","closed",,,"","","","" 116,"webscraping folder initial commit","[Edited] -This PR is a stand alone folder added to the 311-data folder. It contains a webscraping script for the tech stacks of each NC survey. See issue [44](https://github.com/hackforla/data-science/issues/44). The readme contains a link to the google sheets table of the tech used for each website.","rajindermavi","closed",,,"","","" -115,"docs","updated readme to include sample queries that LAANE is interested in doing against the database","AlbertUlysses","closed",,,"","","" -114,"docs","Update README/todos","AlbertUlysses","closed",,,"","","" -113,"docs","Updated misc docs","AlbertUlysses","closed",,,"","","" -111,"Docs!","Updated docs like readme and add extra notes to some other files","AlbertUlysses","closed",,,"","","" -105,"debug","debugged some code that was entering incorrect addresses","AlbertUlysses","closed",,,"","","" -104,"docs/clean up","updated README and removed some old files.","AlbertUlysses","closed",,,"","","" -103,"feat: assessor script","Last script, passes tests refactored a bit","AlbertUlysses","closed",,,"","","" +This PR is a stand alone folder added to the 311-data folder. It contains a webscraping script for the tech stacks of each NC survey. See issue [44](https://github.com/hackforla/data-science/issues/44). The readme contains a link to the google sheets table of the tech used for each website.","rajindermavi","closed",,,"","","","" +115,"docs","updated readme to include sample queries that LAANE is interested in doing against the database","AlbertUlysses","closed",,,"","","","" +114,"docs","Update README/todos","AlbertUlysses","closed",,,"","","","" +113,"docs","Updated misc docs","AlbertUlysses","closed",,,"","","","" +111,"Docs!","Updated docs like readme and add extra notes to some other files","AlbertUlysses","closed",,,"","","","" +105,"debug","debugged some code that was entering incorrect addresses","AlbertUlysses","closed",,,"","","","" +104,"docs/clean up","updated README and removed some old files.","AlbertUlysses","closed",,,"","","","" +103,"feat: assessor script","Last script, passes tests refactored a bit","AlbertUlysses","closed",,,"","","","" 102,"docs","Update Readme -removed a lot of personal notes aimed for me during script writing","AlbertUlysses","closed",,,"","","" -101,"debug categorically inelligible","debug categorically inelligibl data","AlbertUlysses","closed",,,"","","" +removed a lot of personal notes aimed for me during script writing","AlbertUlysses","closed",,,"","","","" +101,"debug categorically inelligible","debug categorically inelligibl data","AlbertUlysses","closed",,,"","","","" 100,"New scripts + debug","added some code that debugs the ofs and luxly datasets. 
-Added a new script that handles warnings and citations.","AlbertUlysses","closed",,,"","","" -99,"bugfix","fixed bugs across 4 files that dealth with addresses that returned 0 for zipcode instead of a 5 number digit","AlbertUlysses","closed",,,"","","" -98,"feat: new script for warning","This script is for warning files that don't have only addresses and dates.","AlbertUlysses","closed",,,"","","" +Added a new script that handles warnings and citations.","AlbertUlysses","closed",,,"","","","" +99,"bugfix","fixed bugs across 4 files that dealth with addresses that returned 0 for zipcode instead of a 5 number digit","AlbertUlysses","closed",,,"","","","" +98,"feat: new script for warning","This script is for warning files that don't have only addresses and dates.","AlbertUlysses","closed",,,"","","","" 97,"Two commits","First commit fixes the warning script file. The second commit adds an extra line describe what the script is for. -","AlbertUlysses","closed",,,"","","" -96,"feat: new processesing script","script for airbnb reviews is complete.","AlbertUlysses","closed",,,"","","" -95,"feat: airbnb dataset script","airbnb script that uploads listings and host information ","AlbertUlysses","closed",,,"","","" -93,"debug one fine stay scripts","debug one fine stay scripts for address2 errors","AlbertUlysses","closed",,,"","","" -92,"debug","debugged the data from one fine stay.","AlbertUlysses","closed",,,"","","" -91,"debug","Did some debugging for luxly platform script","AlbertUlysses","closed",,,"","","" -90,"fixbug in hsodenials","hsodenials bugs are remved, hsodenials script is good to go.","AlbertUlysses","closed",,,"","","" -89,"debug","Debugged the hso_registrant enteries.","AlbertUlysses","closed",,,"","","" -88,"bugfix: fixed exempt script","fixed exempt table to match the rest of the database","AlbertUlysses","closed",,,"","","" -87,"fixbug: fixed bug in categorically inelligible","fixed categoically ineligible bug","AlbertUlysses","closed",,,"","","" -86,"bug fix: fixed bug for complaints data","fixed complaints data to not have nulls and to not allow any state that don't use two letter abbreviation. 
","AlbertUlysses","closed",,,"","","" -85,"refactor/debug: debugging each script","Refactored tot script to make sure it is uploading to sqlite correctly.","AlbertUlysses","closed",,,"","","" -84,"re-refactor normalize address","normalize address wrapper returns none as default again because that's how the library handle it, shouldn't return two different options.","AlbertUlysses","closed",,,"","","" -83,"refactored","refactoring some transformation files to fix bug","AlbertUlysses","closed",,,"","","" +","AlbertUlysses","closed",,,"","","","" +96,"feat: new processesing script","script for airbnb reviews is complete.","AlbertUlysses","closed",,,"","","","" +95,"feat: airbnb dataset script","airbnb script that uploads listings and host information ","AlbertUlysses","closed",,,"","","","" +93,"debug one fine stay scripts","debug one fine stay scripts for address2 errors","AlbertUlysses","closed",,,"","","","" +92,"debug","debugged the data from one fine stay.","AlbertUlysses","closed",,,"","","","" +91,"debug","Did some debugging for luxly platform script","AlbertUlysses","closed",,,"","","","" +90,"fixbug in hsodenials","hsodenials bugs are remved, hsodenials script is good to go.","AlbertUlysses","closed",,,"","","","" +89,"debug","Debugged the hso_registrant enteries.","AlbertUlysses","closed",,,"","","","" +88,"bugfix: fixed exempt script","fixed exempt table to match the rest of the database","AlbertUlysses","closed",,,"","","","" +87,"fixbug: fixed bug in categorically inelligible","fixed categoically ineligible bug","AlbertUlysses","closed",,,"","","","" +86,"bug fix: fixed bug for complaints data","fixed complaints data to not have nulls and to not allow any state that don't use two letter abbreviation. ","AlbertUlysses","closed",,,"","","","" +85,"refactor/debug: debugging each script","Refactored tot script to make sure it is uploading to sqlite correctly.","AlbertUlysses","closed",,,"","","","" +84,"re-refactor normalize address","normalize address wrapper returns none as default again because that's how the library handle it, shouldn't return two different options.","AlbertUlysses","closed",,,"","","","" +83,"refactored","refactoring some transformation files to fix bug","AlbertUlysses","closed",,,"","","","" 82,"feat: hso_registrant script","HSO_Registrant script is complete. -","AlbertUlysses","closed",,,"","","" +","AlbertUlysses","closed",,,"","","","" 81,"refactor: no code change files moved","I moved some files around to better reflect where they belong. For exapmle, assessor table file will be move to processingscripts folder because the code is unique to the assessor dataset. However, normalize_address_wrapper will stay in the transformations folder because multiple scripts use this. -Asssesor file (and builds) will now have the custom code and later include the processing code as well.","AlbertUlysses","closed",,,"","","" -79,"refactor: add a new file to handle multiple files","Remove old code that does multiple files for a job and include a new module that handles that.","AlbertUlysses","closed",,,"","","" -78,"feat: new script","Script that enters one fine stay data into platform table but has a different column layout then the other one fine stay sheets.","AlbertUlysses","closed",,,"","","" -77,"refactor: include steps for entire folder","Added some new code that helps with multiple fies in a folder. 
This should be moved into it's own module in the future.","AlbertUlysses","closed",,,"","","" -76,"refactor: move scripts to new folder","move all scripting files to processingscript folder and leave helper functions in the transformations folder.","AlbertUlysses","closed",,,"","","" -75,"feat: platform one fine stay","Adding the custom script for uploading one fine stay data into platform database.","AlbertUlysses","closed",,,"","","" -74,"refactor: refactor platform luxly","refactor the platform luxly file to make it more readable and closer allign with wemake style guide.","AlbertUlysses","closed",,,"","","" -73,"docs: update license","updated the license to reflect Hack For LA's best practices. ","AlbertUlysses","closed",,,"","","" -72,"feat: platform table luxly files","Completed script for loading luxly files into the platform database.","AlbertUlysses","closed",,,"","","" -71,"feat: add hso revoked table","The script for inserting HSO Revoked data is complete.","AlbertUlysses","closed",,,"","","" -70,"feat: exempt processing script","The processing script for the ""exempt"" dataset is done.","AlbertUlysses","closed",,,"","","" -69,"feat: noncompliant script","Add script that inserts noncompliant data into database.","AlbertUlysses","closed",,,"","","" -68,"feat: categorically inelligible","added a new script for the categorically ineligible dataset.","AlbertUlysses","closed",,,"","","" +Asssesor file (and builds) will now have the custom code and later include the processing code as well.","AlbertUlysses","closed",,,"","","","" +79,"refactor: add a new file to handle multiple files","Remove old code that does multiple files for a job and include a new module that handles that.","AlbertUlysses","closed",,,"","","","" +78,"feat: new script","Script that enters one fine stay data into platform table but has a different column layout then the other one fine stay sheets.","AlbertUlysses","closed",,,"","","","" +77,"refactor: include steps for entire folder","Added some new code that helps with multiple fies in a folder. This should be moved into it's own module in the future.","AlbertUlysses","closed",,,"","","","" +76,"refactor: move scripts to new folder","move all scripting files to processingscript folder and leave helper functions in the transformations folder.","AlbertUlysses","closed",,,"","","","" +75,"feat: platform one fine stay","Adding the custom script for uploading one fine stay data into platform database.","AlbertUlysses","closed",,,"","","","" +74,"refactor: refactor platform luxly","refactor the platform luxly file to make it more readable and closer allign with wemake style guide.","AlbertUlysses","closed",,,"","","","" +73,"docs: update license","updated the license to reflect Hack For LA's best practices. 
","AlbertUlysses","closed",,,"","","","" +72,"feat: platform table luxly files","Completed script for loading luxly files into the platform database.","AlbertUlysses","closed",,,"","","","" +71,"feat: add hso revoked table","The script for inserting HSO Revoked data is complete.","AlbertUlysses","closed",,,"","","","" +70,"feat: exempt processing script","The processing script for the ""exempt"" dataset is done.","AlbertUlysses","closed",,,"","","","" +69,"feat: noncompliant script","Add script that inserts noncompliant data into database.","AlbertUlysses","closed",,,"","","","" +68,"feat: categorically inelligible","added a new script for the categorically ineligible dataset.","AlbertUlysses","closed",,,"","","","" 67,"feat: add complaints script","Added a new script that inputs Complaints data into the database. -","AlbertUlysses","closed",,,"","","" +","AlbertUlysses","closed",,,"","","","" 64,"feat: tot insert file","New insert script complete. -","AlbertUlysses","closed",,,"","","" -63,"feat: add new abstraction function","refactored some code and created a new abstraction function for inserting data. ","AlbertUlysses","closed",,,"","","" -62,"quick fix","removed absolute path : quick fix","AlbertUlysses","closed",,,"","","" +","AlbertUlysses","closed",,,"","","","" +63,"feat: add new abstraction function","refactored some code and created a new abstraction function for inserting data. ","AlbertUlysses","closed",,,"","","","" +62,"quick fix","removed absolute path : quick fix","AlbertUlysses","closed",,,"","","","" 61,"feat: hso_denials insert function","Created a new file for hso_denial table, Added test for the custom functions. Added the code to insert the hso_denials data into the db. 1st dataset that's completely done -Need to go back to refactor the code because it's ugly","AlbertUlysses","closed",,,"","","" -58,"docs: adding COPYING file","removed some old jupyter files and added a license file which is called COPYING per GNU's suggestion.","AlbertUlysses","closed",,,"","","" -57,"Feat: add Airbnb tables","added airbnb tables and updated SQL Alchemy relationships.","AlbertUlysses","closed",,,"","","" -56,"feat: add SQL Alchemy models/Database","added SQL Alchemy Models and the Database code. ","AlbertUlysses","closed",,,"","","" -55,"docs: rewrote some docstrings etc","updated some doc strings in files to better reflect the intent.","AlbertUlysses","closed",,,"","","" -54,"feat: new code for date entries","I added new code and tests for formatting date columns. ","AlbertUlysses","closed",,,"","","" +Need to go back to refactor the code because it's ugly","AlbertUlysses","closed",,,"","","","" +58,"docs: adding COPYING file","removed some old jupyter files and added a license file which is called COPYING per GNU's suggestion.","AlbertUlysses","closed",,,"","","","" +57,"Feat: add Airbnb tables","added airbnb tables and updated SQL Alchemy relationships.","AlbertUlysses","closed",,,"","","","" +56,"feat: add SQL Alchemy models/Database","added SQL Alchemy Models and the Database code. ","AlbertUlysses","closed",,,"","","","" +55,"docs: rewrote some docstrings etc","updated some doc strings in files to better reflect the intent.","AlbertUlysses","closed",,,"","","","" +54,"feat: new code for date entries","I added new code and tests for formatting date columns. ","AlbertUlysses","closed",,,"","","","" 53,"refactor: update assessor table","Did some refactoring that addressed these issues: - Rewrote the code to match the WeMake style guide. 
- The functions are faster, and the memory is more efficient by ten times on average. - The functions are pure now, with no added consequence to the DataFrames or Series passed into them. -All tests are still passing.","AlbertUlysses","closed",,,"","","" -52,"docs: re-wrote some of the normalize_address information","Docs improvement for normailze_address","AlbertUlysses","closed",,,"","","" -51,"refactored transformation scripts","refactored a lot and added extra notes in read me ","AlbertUlysses","closed",,,"","","" +All tests are still passing.","AlbertUlysses","closed",,,"","","","" +52,"docs: re-wrote some of the normalize_address information","Docs improvement for normailze_address","AlbertUlysses","closed",,,"","","","" +51,"refactored transformation scripts","refactored a lot and added extra notes in read me ","AlbertUlysses","closed",,,"","","","" 50,"tot update/completion","ToT table is done. Refactored it a bit - still passing test -","AlbertUlysses","closed",,,"","","" -49,"work on tot table","WIP - started work on TOT table and tests I will finish in tomorrow and start on a new table.","AlbertUlysses","closed",,,"","","" -48,"Add expemtion table transformations","I added the transformation functions for exempt table with their tests, renamed the main folder.","AlbertUlysses","closed",,,"","","" +","AlbertUlysses","closed",,,"","","","" +49,"work on tot table","WIP - started work on TOT table and tests I will finish in tomorrow and start on a new table.","AlbertUlysses","closed",,,"","","","" +48,"Add expemtion table transformations","I added the transformation functions for exempt table with their tests, renamed the main folder.","AlbertUlysses","closed",,,"","","","" 47,"updating folder layout and adding transformation scripts with tests","introduces the first of a few scripts to the project along with some tests. Needs refactoring but all tests are passing. -","AlbertUlysses","closed",,,"","","" -42,"Update Pedestrian Data folder","Pedestrian Data folder including the relevant notebooks and visualizations.","dplem","closed",,,"","","" -41,"Push request redo preprocessing files","","KarinaLopez19","closed",,,"","","AlbertUlysses" +","AlbertUlysses","closed",,,"","","","" +42,"Update Pedestrian Data folder","Pedestrian Data folder including the relevant notebooks and visualizations.","dplem","closed",,,"","","","" +41,"Push request redo preprocessing files","","KarinaLopez19","closed",,,"","","AlbertUlysses","" 40,"Extend Mapillary API wrapper","These are new features for the python wrapper for Mapillary's API to allow it to: - Make requests of other layers of Mapillary data beyond traffic signs @@ -145,10 +145,10 @@ All tests are still passing.","AlbertUlysses","closed",,,"","","" - Parse Mapillary's latitude/longitude string format -- Give feedback while downloading data and timeout after 5 minutes, to prevent downloads from failing silently","henrykaplan","closed",,,"","henrykaplan","dplem,AlbertUlysses" -39,"add bbox function","Created bbox function to help create a boundary box for cleaning data. Added one test to ensure any refactoring in the future still returns the correct expected results. ","AlbertUlysses","closed",,,"","","" -38,"this is a test","","KarinaLopez19","closed",,,"","","" +- Give feedback while downloading data and timeout after 5 minutes, to prevent downloads from failing silently","henrykaplan","closed",,,"","henrykaplan","dplem,AlbertUlysses","" +39,"add bbox function","Created bbox function to help create a boundary box for cleaning data. 
Added one test to ensure any refactoring in the future still returns the correct expected results. ","AlbertUlysses","closed",,,"","","","" +38,"this is a test","","KarinaLopez19","closed",,,"","","","" 37,"add new folder with new functions to help with clean up","Adding the folder for airbnb listing and first function for helper functions that will be used for cleaning the data -","AlbertUlysses","closed",,,"","","" -35,"clean up jupyter notebook","cleaned up the jupyter notebook to make it more user friendly","AlbertUlysses","closed",,,"","","" -34,"chore: clean up modules","used black to clean up modules and removed commented code that wasn't being used.","AlbertUlysses","closed",,,"","","" +","AlbertUlysses","closed",,,"","","","" +35,"clean up jupyter notebook","cleaned up the jupyter notebook to make it more user friendly","AlbertUlysses","closed",,,"","","","" +34,"chore: clean up modules","used black to clean up modules and removed commented code that wasn't being used.","AlbertUlysses","closed",,,"","","","" From 892a336695b7db5a1c90cafa20284be64484a7ca Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 12 Aug 2024 17:11:35 -0400 Subject: [PATCH 38/49] Update pull_requests_to_csv.yml Include linked issues from "development" field in PR page --- .github/workflows/pull_requests_to_csv.yml | 113 ++++++++++++++++++--- 1 file changed, 100 insertions(+), 13 deletions(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index a2a8bce..c5aeb00 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -7,10 +7,7 @@ on: workflow_dispatch: schedule: - cron: '0 0 * * *' # Runs daily at midnight - # pull_request: - # types: [opened, closed, reopened] - # branches: - # - n2020h-issues-to-csv + jobs: list-pull-requests: runs-on: ubuntu-latest @@ -20,12 +17,12 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 -###------------------------------------------------------####### # Set up Node.js to use jq command - name: Set up Node.js uses: actions/setup-node@v3 with: node-version: '20' + # Fetch pull requests data and save it to pulls.json - name: Fetch pull requests data env: @@ -36,15 +33,22 @@ jobs: "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&per_page=100" \ -o pulls.json - # Check the content of pulls.json for debugging - - name: Display pulls.json content - run: cat pulls.json + # Fetch linked issues for each pull request and save to timeline.json + - name: Fetch linked issues for each PR + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + jq -r '.[].number' pulls.json | while read pr; do \ + curl -H "Authorization: token $GITHUB_TOKEN" \ + -H "Accept: application/vnd.github.v3+json" \ + "https://api.github.com/repos/${{ github.repository }}/issues/$pr/timeline?per_page=100" \ + -o "timeline_$pr.json"; \ + done - # Generate pull requests CSV - # (.body | capture_all("#(?\\d+)"; "g") | join(",")) + # Generate pull requests CSV including linked issues - name: Generate pull requests CSV run: | - echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers, Linked Issues" > pull_requests.csv + echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues" > pull_requests.csv jq -r '.[] | select(.user.login != "dependabot[bot]") | [ .number, .title, @@ -56,8 +60,14 @@ jobs: (.labels | 
map(.name) | join(",")), (.assignees | map(.login) | join(",")), (.requested_reviewers | map(.login) | join(",")), - (if .body != null then .body | gsub("#";" ") | split(" ") | map(select(startswith("issue_number"))) | join(",") else "" end) - ] | @csv' pulls.json >> pull_requests.csv + (if .number as $pr | .body != null then + (input_filename | sub("timeline_";"") | sub(".json";"")) as $pr_number | + (try input | .[] | select(.event == "cross-referenced" and .source.issue) | + .source.issue.number | tostring + ": " + .source.issue.title | join(", ")) + else + "" + end) + ] | @csv' pulls.json timeline_*.json >> pull_requests.csv # Check the content of pull_requests.csv for debugging - name: Display pull_requests.csv content @@ -75,6 +85,83 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +##------------------------------------------## +# name: List Pull Requests and Output as CSV + +# on: +# push: +# branches: +# - n2020h-issues-to-csv +# workflow_dispatch: +# schedule: +# - cron: '0 0 * * *' # Runs daily at midnight +# # pull_request: +# # types: [opened, closed, reopened] +# # branches: +# # - n2020h-issues-to-csv +# jobs: +# list-pull-requests: +# runs-on: ubuntu-latest + +# steps: +# # Checkout the repository to access any scripts or tools you might need +# - name: Checkout repository +# uses: actions/checkout@v3 + +# # Set up Node.js to use jq command +# - name: Set up Node.js +# uses: actions/setup-node@v3 +# with: +# node-version: '20' +# # Fetch pull requests data and save it to pulls.json +# - name: Fetch pull requests data +# env: +# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +# run: | +# curl -H "Authorization: token $GITHUB_TOKEN" \ +# -H "Accept: application/vnd.github.v3+json" \ +# "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&per_page=100" \ +# -o pulls.json + +# # Check the content of pulls.json for debugging +# - name: Display pulls.json content +# run: cat pulls.json + +# # Generate pull requests CSV +# # (.body | capture_all("#(?\\d+)"; "g") | join(",")) +# - name: Generate pull requests CSV +# run: | +# echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers, Linked Issues" > pull_requests.csv +# jq -r '.[] | select(.user.login != "dependabot[bot]") | [ +# .number, +# .title, +# .body, +# .user.login, +# .state, +# .commits, +# .changed_files, +# (.labels | map(.name) | join(",")), +# (.assignees | map(.login) | join(",")), +# (.requested_reviewers | map(.login) | join(",")), +# (if .body != null then .body | gsub("#";" ") | split(" ") | map(select(startswith("issue_number"))) | join(",") else "" end) +# ] | @csv' pulls.json >> pull_requests.csv + +# # Check the content of pull_requests.csv for debugging +# - name: Display pull_requests.csv content +# run: cat pull_requests.csv + +# # Commit and push the generated CSV to the repository +# - name: Commit and push CSV +# run: | +# git config user.name "Automated" +# git config user.email "actions@users.noreply.github.com" +# git add -f pull_requests.csv +# timestamp=$(date -u) +# git commit -m "Latest pull requests data: ${timestamp}" || exit 0 +# git push --force origin HEAD:refs/heads/n2020h-issues-to-csv +# env: +# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ####-------------------------------------------------####### # # Generate pull requests CSV From da35cd0663fb066a7e402e9359e8c5c57eec8160 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 12 Aug 2024 17:15:46 -0400 Subject: [PATCH 
From da35cd0663fb066a7e402e9359e8c5c57eec8160 Mon Sep 17 00:00:00 2001
From: n2020h <72112832+n2020h@users.noreply.github.com>
Date: Mon, 12 Aug 2024 17:15:46 -0400
Subject: [PATCH 39/49] Update pull_requests_to_csv.yml

Null case
---
 .github/workflows/pull_requests_to_csv.yml | 56 ++++++++++++----------
 1 file changed, 32 insertions(+), 24 deletions(-)

diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml
index c5aeb00..60a2be4 100644
--- a/.github/workflows/pull_requests_to_csv.yml
+++ b/.github/workflows/pull_requests_to_csv.yml
@@ -33,41 +33,48 @@ jobs:
           "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&per_page=100" \
           -o pulls.json

-      # Fetch linked issues for each pull request and save to timeline.json
+      # Fetch linked issues for each PR
       - name: Fetch linked issues for each PR
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
-          jq -r '.[].number' pulls.json | while read pr; do \
+          for pr_number in $(jq -r '.[].number' pulls.json); do \
             curl -H "Authorization: token $GITHUB_TOKEN" \
               -H "Accept: application/vnd.github.v3+json" \
-              "https://api.github.com/repos/${{ github.repository }}/issues/$pr/timeline?per_page=100" \
-              -o "timeline_$pr.json"; \
+              "https://api.github.com/repos/${{ github.repository }}/issues/$pr_number/timeline?per_page=100" \
+              -o "timeline_$pr_number.json"; \
           done

       # Generate pull requests CSV including linked issues
-      - name: Generate pull requests CSV
+      - name: Generate pull requests CSV including linked issues
         run: |
           echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues" > pull_requests.csv
-          jq -r '.[] | select(.user.login != "dependabot[bot]") | [
-            .number,
-            .title,
-            .body,
-            .user.login,
-            .state,
-            .commits,
-            .changed_files,
-            (.labels | map(.name) | join(",")),
-            (.assignees | map(.login) | join(",")),
-            (.requested_reviewers | map(.login) | join(",")),
-            (if .number as $pr | .body != null then
-              (input_filename | sub("timeline_";"") | sub(".json";"")) as $pr_number |
-              (try input | .[] | select(.event == "cross-referenced" and .source.issue) |
-              .source.issue.number | tostring + ": " + .source.issue.title | join(", "))
-            else
-              ""
-            end)
-          ] | @csv' pulls.json timeline_*.json >> pull_requests.csv
+
+          for pr_number in $(jq -r '.[].number' pulls.json); do
+            timeline_file="timeline_$pr_number.json"
+
+            # Ensure the timeline file is not empty before processing
+            if [ -s "$timeline_file" ]; then
+              linked_issues=$(jq -r '[.[] | select(.event == "cross-referenced" and .source.issue) | .source.issue.number | tostring] | join(", ")' "$timeline_file")
+            else
+              linked_issues=""
+            fi
+
+            jq -r --arg linked_issues "$linked_issues" \
+              '.[] | select(.number == '$pr_number') | [
+                .number,
+                .title,
+                .body,
+                .user.login,
+                .state,
+                .commits,
+                .changed_files,
+                (.labels | map(.name) | join(",")),
+                (.assignees | map(.login) | join(",")),
+                (.requested_reviewers | map(.login) | join(",")),
+                $linked_issues
+              ] | @csv' pulls.json >> pull_requests.csv
+          done

       # Check the content of pull_requests.csv for debugging
       - name: Display pull_requests.csv content
@@ -85,6 +92,7 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
 ##------------------------------------------##
 # name: List Pull Requests and Output as CSV
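Aside on quoting in the loop above: $pr_number is spliced into the jq program by closing and reopening the single-quoted string. That works here because the value comes from jq itself, but jq's `--argjson` flag is the more robust way to hand a number to the filter. A sketch of the same per-PR row selection in that style, with the column list abbreviated (same files and shell variables as the step above):

    # Emit a CSV row for one PR, passing the PR number as a typed jq variable
    # rather than splicing it into the program text.
    jq -r --argjson pr "$pr_number" --arg linked_issues "$linked_issues" \
      '.[] | select(.number == $pr)
           | [.number, .title, .user.login, .state, $linked_issues]
           | @csv' pulls.json >> pull_requests.csv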
From 66e06d9e098f94b6b3f7bc3a67a2374a9183839d Mon Sep 17 00:00:00 2001
From: Automated <actions@users.noreply.github.com>
Date: Mon, 12 Aug 2024 21:16:51 +0000
Subject: [PATCH 40/49] Latest pull requests data: Mon Aug 12 21:16:51 UTC 2024

---
 pull_requests.csv | 1182 ++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 1177 insertions(+), 5 deletions(-)

diff --git a/pull_requests.csv b/pull_requests.csv
index 03e9b2b..6822c75 100644
--- a/pull_requests.csv
+++ b/pull_requests.csv
@@ -1,4 +1,4 @@
-PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers, Linked Issues
+PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues
 205,"Added data loading and cleaning Jupyter notebook","- Initial implementation of data loading/ saving and cleaning.
 - Improvements planned for future versions.","mru-hub","closed",,,"","","",""
 201,"feat: City of Los Angeles Evictions #179","This commit introduces basic EDA on the LA evictions data,
 It introduces explode/splitting of Just Cause column for effective analysis
@@ -23,7 +23,7 @@ starting #179 ","rahul897","open",,,"","","",""
 - To see examples of the commit history of the various methods of merging, see these branches in my forked repo:
   - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended
   - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits
-  - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","",""
+  - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","","5127, 2607, 6139, 296"
 187,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Fixes N/A - ### What changes did you make?
@@ -42,14 +42,796 @@ starting #179 ","rahul897","open",,,"","","",""
 186,"Lalla la crime analysis 2",,"Lalla22","open",,,"","","salice",""
 185,"Create README.md",,"Lalla22","closed",,,"","","",""
 184,"git c",,"Lalla22","closed",,,"","","",""
+176,"build(deps): bump cryptography from 3.3.2 to 39.0.1 in /311-data/mapillarywrapper","Bumps [cryptography](https://github.com/pyca/cryptography) from 3.3.2 to 39.0.1.
+[Dependabot-generated body omitted: cryptography changelog excerpt, compatibility score, and command reference]
","dependabot[bot]","open",,,"dependencies","","","" +175,"build(deps): bump certifi from 2021.5.30 to 2022.12.7 in /LAANE","Bumps [certifi](https://github.com/certifi/python-certifi) from 2021.5.30 to 2022.12.7. +
+[Dependabot-generated body omitted: certifi commit list, compatibility score, and command reference]
","dependabot[bot]","open",,,"dependencies","","","" +174,"build(deps): bump certifi from 2020.6.20 to 2022.12.7 in /311-data/mapillarywrapper","Bumps [certifi](https://github.com/certifi/python-certifi) from 2020.6.20 to 2022.12.7. +
+[Dependabot-generated body omitted: certifi commit list, compatibility score, and command reference]
","dependabot[bot]","open",,,"dependencies","","","" +173,"build(deps): bump nbconvert from 5.6.1 to 6.5.1 in /311-data/mapillarywrapper","Bumps [nbconvert](https://github.com/jupyter/nbconvert) from 5.6.1 to 6.5.1. +
+[Dependabot-generated body omitted: nbconvert release notes, commit list, compatibility score, and command reference]
","dependabot[bot]","open",,,"dependencies","","","172" +172,"build(deps): bump nbconvert from 5.6.1 to 6.3.0 in /311-data/mapillarywrapper","Bumps [nbconvert](https://github.com/jupyter/nbconvert) from 5.6.1 to 6.3.0. +
+[Dependabot-generated body omitted: nbconvert commit list, compatibility score, and command reference]
","dependabot[bot]","closed",,,"dependencies","","","" +171,"build(deps): bump mistune from 0.8.4 to 2.0.3 in /311-data/mapillarywrapper","Bumps [mistune](https://github.com/lepture/mistune) from 0.8.4 to 2.0.3. +
+[Dependabot-generated body omitted: mistune release notes, changelog excerpt, commit list, compatibility score, and command reference]
","dependabot[bot]","open",,,"dependencies","","","" 168,"Updating label analysis jupyter notebook with co-occurrence analysis",,"rbianchetti","open",,,"","","","" 167,"Merge pull request #164 from hackforla/160-survey-repo-labels","A python script that gets all the labels used in open issues across a…","codemamma","open",,,"","","","" -164,"A python script that gets all the labels used in open issues across a…","…n organization(s) via GitHub API","rbianchetti","closed",,,"","","","" +166,"build(deps): bump numpy from 1.21.2 to 1.22.0 in /LAANE","Bumps [numpy](https://github.com/numpy/numpy) from 1.21.2 to 1.22.0. +
+[Dependabot-generated body omitted: numpy release notes, commit list, compatibility score, and command reference]
","dependabot[bot]","open",,,"dependencies","","","" +165,"build(deps): bump numpy from 1.18.5 to 1.22.0 in /311-data/mapillarywrapper","Bumps [numpy](https://github.com/numpy/numpy) from 1.18.5 to 1.22.0. +
+[Dependabot-generated body omitted: numpy release notes, commit list, compatibility score, and command reference]
","dependabot[bot]","open",,,"dependencies","","","142" +164,"A python script that gets all the labels used in open issues across a…","…n organization(s) via GitHub API","rbianchetti","closed",,,"","","","26" 159,"Linking latest pedestrian safety report presentation","This replaces a previous powerpoint presentation stored in the repo with links to the current report and presentation saved in Hack for LA's shared google drive.","henrykaplan","open",,,"","","akhaleghi","" -158,"Data Science logo links and image files","The latest version of the Hack for LA data science logo is being worked on in Google Drive. I've updated the directory in the git repo with links to the latest and with png and svg versions of the latest logo files.","henrykaplan","closed",,,"","","akhaleghi","" +158,"Data Science logo links and image files","The latest version of the Hack for LA data science logo is being worked on in Google Drive. I've updated the directory in the git repo with links to the latest and with png and svg versions of the latest logo files.","henrykaplan","closed",,,"","","akhaleghi","140" +142,"build(deps): bump numpy from 1.18.5 to 1.21.0 in /311-data/mapillarywrapper","Bumps [numpy](https://github.com/numpy/numpy) from 1.18.5 to 1.21.0. +
+[Dependabot-generated body omitted: numpy release notes, commit list, compatibility score, and command reference]
","dependabot[bot]","closed",,,"dependencies","","","" 139,"Removed spaces from all filenames in pedestrian data directory","Changed filenames of all output files and graphics. No changes to file contents — these are final graphics and reports, and aren't aren't referenced by file path anywhere.","henrykaplan","closed",,,"","","","" 132,"Pedestrian safety","Added readme documentation for downloading and organizing data, running Jupyter notebooks, and reorganized analysis files.","henrykaplan","closed",,,"","","","" -123,"Create a new issue template for epics",,"akhaleghi","closed",,,"","","","" +123,"Create a new issue template for epics",,"akhaleghi","closed",,,"","","","120" 122,"Pedestrian safety draft visualizations and presentation","Further simplified and clarified maps and traffic accident charts, added these new charts and related discussion to the presentation file.","henrykaplan","closed",,,"","","AlbertUlysses","" 117,"Pedestrian safety","- Add optional parameter to mapillary wrapper client to allow data requests of recent data only, to quickly update existing datasets. - Better maps and visualizations. @@ -61,6 +843,175 @@ This PR is a stand alone folder added to the 311-data folder. It contains a webs 115,"docs","updated readme to include sample queries that LAANE is interested in doing against the database","AlbertUlysses","closed",,,"","","","" 114,"docs","Update README/todos","AlbertUlysses","closed",,,"","","","" 113,"docs","Updated misc docs","AlbertUlysses","closed",,,"","","","" +112,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /LAANE","Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.24.3 to 1.26.5. +
+[Dependabot-generated body omitted: urllib3 release notes, changelog excerpt, commit list, compatibility score, and command reference]
","dependabot[bot]","closed",,,"dependencies","","","" 111,"Docs!","Updated docs like readme and add extra notes to some other files","AlbertUlysses","closed",,,"","","","" 105,"debug","debugged some code that was entering incorrect addresses","AlbertUlysses","closed",,,"","","","" 104,"docs/clean up","updated README and removed some old files.","AlbertUlysses","closed",,,"","","","" @@ -115,6 +1066,58 @@ Asssesor file (and builds) will now have the custom code and later include the p 61,"feat: hso_denials insert function","Created a new file for hso_denial table, Added test for the custom functions. Added the code to insert the hso_denials data into the db. 1st dataset that's completely done Need to go back to refactor the code because it's ugly","AlbertUlysses","closed",,,"","","","" +60,"build(deps): bump jupyterlab from 2.1.5 to 2.2.10 in /311-data/mapillarywrapper","Bumps [jupyterlab](https://github.com/jupyterlab/jupyterlab) from 2.1.5 to 2.2.10. +
+[Dependabot-generated body omitted: jupyterlab commit list, compatibility score, and command reference]
","dependabot[bot]","closed",,,"dependencies","","","" 58,"docs: adding COPYING file","removed some old jupyter files and added a license file which is called COPYING per GNU's suggestion.","AlbertUlysses","closed",,,"","","","" 57,"Feat: add Airbnb tables","added airbnb tables and updated SQL Alchemy relationships.","AlbertUlysses","closed",,,"","","","" 56,"feat: add SQL Alchemy models/Database","added SQL Alchemy Models and the Database code. ","AlbertUlysses","closed",,,"","","","" @@ -135,6 +1138,175 @@ All tests are still passing.","AlbertUlysses","closed",,,"","","","" 47,"updating folder layout and adding transformation scripts with tests","introduces the first of a few scripts to the project along with some tests. Needs refactoring but all tests are passing. ","AlbertUlysses","closed",,,"","","","" +43,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /311-data/mapillarywrapper","Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.24.3 to 1.26.5. +
+[Dependabot-generated body omitted: urllib3 release notes and changelog excerpt]

+
    +
  • +

    NOTE: urllib3 v2.0 will drop support for Python 2. +Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>_.

    +
  • +
  • +

    Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)

    +
  • +
  • +

    Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that +still wish to use TLS earlier than 1.2 without a deprecation warning

    +
  • +
+ +
+

... (truncated)

+
+
+Commits +
    +
  • d161647 Release 1.26.5
  • +
  • 2d4a3fe Improve performance of sub-authority splitting in URL
  • +
  • 2698537 Update vendored six to 1.16.0
  • +
  • 07bed79 Fix deprecation warnings for Python 3.10 ssl module
  • +
  • d725a9b Add Python 3.10 to GitHub Actions
  • +
  • 339ad34 Use pytest==6.2.4 on Python 3.10+
  • +
  • f271c9c Apply latest Black formatting
  • +
  • 1884878 [1.26] Properly proxy EOF on the SSLTransport test suite
  • +
  • a891304 Release 1.26.4
  • +
  • 8d65ea1 Merge pull request from GHSA-5phf-pp7p-vc2r
  • +
  • Additional commits viewable in compare view
  • +
+
+
+ + +[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=urllib3&package-manager=pip&previous-version=1.24.3&new-version=1.26.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) + +Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. + +[//]: # (dependabot-automerge-start) +[//]: # (dependabot-automerge-end) + +--- + +
+Dependabot commands and options +
+ +You can trigger Dependabot actions by commenting on this PR: +- `@dependabot rebase` will rebase this PR +- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it +- `@dependabot merge` will merge this PR after your CI passes on it +- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it +- `@dependabot cancel merge` will cancel a previously requested merge and block automerging +- `@dependabot reopen` will reopen this PR if it is closed +- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually +- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) +- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language +- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language +- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language +- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language + +You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). + +
","dependabot[bot]","closed",,,"dependencies","","","" 42,"Update Pedestrian Data folder","Pedestrian Data folder including the relevant notebooks and visualizations.","dplem","closed",,,"","","","" 41,"Push request redo preprocessing files","","KarinaLopez19","closed",,,"","","AlbertUlysses","" 40,"Extend Mapillary API wrapper","These are new features for the python wrapper for Mapillary's API to allow it to: From 807e2f022fa4447a942da46262ac308f4dba3075 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 12 Aug 2024 17:21:42 -0400 Subject: [PATCH 41/49] Update pull_requests_to_csv.yml removed description field (.body) --- .github/workflows/pull_requests_to_csv.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index 60a2be4..cfc6fac 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -48,7 +48,7 @@ jobs: # Generate pull requests CSV including linked issues - name: Generate pull requests CSV including linked issues run: | - echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues" > pull_requests.csv + echo "PR Number,Title,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues" > pull_requests.csv for pr_number in $(jq -r '.[].number' pulls.json); do timeline_file="timeline_$pr_number.json" @@ -64,7 +64,6 @@ jobs: '.[] | select(.number == '$pr_number') | [ .number, .title, - .body, .user.login, .state, .commits, From 20a3fae38c94cfb54096c1c27d462e2b8fb42ace Mon Sep 17 00:00:00 2001 From: Automated Date: Mon, 12 Aug 2024 21:22:35 +0000 Subject: [PATCH 42/49] Latest pull requests data: Mon Aug 12 21:22:35 UTC 2024 --- pull_requests.csv | 1427 ++++----------------------------------------- 1 file changed, 101 insertions(+), 1326 deletions(-) diff --git a/pull_requests.csv b/pull_requests.csv index 6822c75..628e482 100644 --- a/pull_requests.csv +++ b/pull_requests.csv @@ -1,1326 +1,101 @@ -PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues -205,"Added data loading and cleaning Jupyter notebook","- Initial implementation of data loading/ saving and cleaning. -- Improvements planned for future versions.","mru-hub","closed",,,"","","","" -201,"feat: City of Los Angeles Evictions #179","This commit introduces basic EDA on the LA evictions data, It introduces explode/splitting of Just Cause column for effective analysis - -starting #179 ","rahul897","open",,,"","","","" -199,"177 create 311 data csv files that can be accessed through a jupyter notebook","Cleaning rules document from the 311-data","mru-hub","closed",,,"","","salice","" -197,"N2020h issues to csv",,"n2020h","open",,,"","n2020h","","" -196,"186 Adding Crime Data",,"dolla24","closed",,,"","dolla24","","" -195,"fix name of image to work with Windows",,"salice","closed",,,"","","","" -192,"Create issues-to-csv.yml","testing feature branch with draft pull request","n2020h","closed",,,"","","","" -188,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Partially fixes https://github.com/hackforla/website/issues/6139 - -### What changes did you make? 
-- Adding Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo (commit: https://github.com/hackforla/access-the-data/commit/62609601277509b0473bbf8f6dbf1ef735430c84) to this repo - -### Why did you make the changes? -- Requested by Bonnie per [this comment](https://github.com/hackforla/website/issues/5127#issuecomment-1776306588) -- AJ Price, Access the Data's PM, didn't want Jupyter Notebook to show up as a Language for the project, so Bonnie recommended moving the Workshop-Feedback_Bigram_Analysis.ipynb file to the Data Science repo. - -### Additional Notes -- I recommend using rebase merging to preserve who authored the file. -- To see examples of the commit history of the various methods of merging, see these branches in my forked repo: - - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended - - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits - - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","","5127, 2607, 6139, 296" -187,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Fixes N/A - - -### What changes did you make? -- Adding Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo (commit: https://github.com/hackforla/access-the-data/commit/62609601277509b0473bbf8f6dbf1ef735430c84) to this repo - -### Why did you make the changes? -- Requested by Bonnie per [this comment](https://github.com/hackforla/website/issues/5127#issuecomment-1776306588) -- AJ Price, Access the Data's PM, didn't want Jupyter Notebook to show up as a Language for the project, so Bonnie recommended moving the Workshop-Feedback_Bigram_Analysis.ipynb file to the Data Science repo. - -### Additional Notes -- I recommend using rebase merging to preserve who authored the file. -- To see examples of the commit history of the various methods of merging, see these branches in my forked repo: - - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended - - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits - - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","","" -186,"Lalla la crime analysis 2",,"Lalla22","open",,,"","","salice","" -185,"Create README.md",,"Lalla22","closed",,,"","","","" -184,"git c",,"Lalla22","closed",,,"","","","" -176,"build(deps): bump cryptography from 3.3.2 to 39.0.1 in /311-data/mapillarywrapper","Bumps [cryptography](https://github.com/pyca/cryptography) from 3.3.2 to 39.0.1. -
-Changelog -

Sourced from cryptography's changelog.

-
-

39.0.1 - 2023-02-07

-

-* **SECURITY ISSUE** - Fixed a bug where ``Cipher.update_into`` accepted Python
-  buffer protocol objects, but allowed immutable buffers. **CVE-2023-23931**
-* Updated Windows, macOS, and Linux wheels to be compiled with OpenSSL 3.0.8.
-

.. _v39-0-0:

-

39.0.0 - 2023-01-01 -

-
    -
  • BACKWARDS INCOMPATIBLE: Support for OpenSSL 1.1.0 has been removed. -Users on older version of OpenSSL will need to upgrade.
  • -
  • BACKWARDS INCOMPATIBLE: Dropped support for LibreSSL < 3.5. The new -minimum LibreSSL version is 3.5.0. Going forward our policy is to support -versions of LibreSSL that are available in versions of OpenBSD that are -still receiving security support.
  • -
  • BACKWARDS INCOMPATIBLE: Removed the encode_point and -from_encoded_point methods on -:class:~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicNumbers, -which had been deprecated for several years. -:meth:~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey.public_bytes -and -:meth:~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey.from_encoded_point -should be used instead.
  • -
  • BACKWARDS INCOMPATIBLE: Support for using MD5 or SHA1 in -:class:~cryptography.x509.CertificateBuilder, other X.509 builders, and -PKCS7 has been removed.
  • -
  • BACKWARDS INCOMPATIBLE: Dropped support for macOS 10.10 and 10.11, macOS -users must upgrade to 10.12 or newer.
  • -
  • ANNOUNCEMENT: The next version of cryptography (40.0) will change -the way we link OpenSSL. This will only impact users who build -cryptography from source (i.e., not from a wheel), and specify their -own version of OpenSSL. For those users, the CFLAGS, LDFLAGS, -INCLUDE, LIB, and CRYPTOGRAPHY_SUPPRESS_LINK_FLAGS environment -variables will no longer be respected. Instead, users will need to -configure their builds as documented here_.
  • -
  • Added support for -:ref:disabling the legacy provider in OpenSSL 3.0.x<legacy-provider>.
  • -
  • Added support for disabling RSA key validation checks when loading RSA -keys via -:func:~cryptography.hazmat.primitives.serialization.load_pem_private_key, -:func:~cryptography.hazmat.primitives.serialization.load_der_private_key, -and -:meth:~cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateNumbers.private_key. -This speeds up key loading but is :term:unsafe if you are loading potentially -attacker supplied keys.
  • -
  • Significantly improved performance for -:class:~cryptography.hazmat.primitives.ciphers.aead.ChaCha20Poly1305
  • -
- -
-

... (truncated)

-
-
-Commits - -
-
- - -[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cryptography&package-manager=pip&previous-version=3.3.2&new-version=39.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) - -Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. - -[//]: # (dependabot-automerge-start) -[//]: # (dependabot-automerge-end) - ---- - -
-Dependabot commands and options -
- -You can trigger Dependabot actions by commenting on this PR: -- `@dependabot rebase` will rebase this PR -- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it -- `@dependabot merge` will merge this PR after your CI passes on it -- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it -- `@dependabot cancel merge` will cancel a previously requested merge and block automerging -- `@dependabot reopen` will reopen this PR if it is closed -- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually -- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) -- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language -- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language -- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language -- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language - -You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). - -
","dependabot[bot]","open",,,"dependencies","","","" -175,"build(deps): bump certifi from 2021.5.30 to 2022.12.7 in /LAANE","Bumps [certifi](https://github.com/certifi/python-certifi) from 2021.5.30 to 2022.12.7. -
-Commits - -
-
- - -[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=certifi&package-manager=pip&previous-version=2021.5.30&new-version=2022.12.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) - -Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. - -[//]: # (dependabot-automerge-start) -[//]: # (dependabot-automerge-end) - ---- - -
-Dependabot commands and options -
- -You can trigger Dependabot actions by commenting on this PR: -- `@dependabot rebase` will rebase this PR -- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it -- `@dependabot merge` will merge this PR after your CI passes on it -- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it -- `@dependabot cancel merge` will cancel a previously requested merge and block automerging -- `@dependabot reopen` will reopen this PR if it is closed -- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually -- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) -- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language -- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language -- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language -- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language - -You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). - -
","dependabot[bot]","open",,,"dependencies","","","" -174,"build(deps): bump certifi from 2020.6.20 to 2022.12.7 in /311-data/mapillarywrapper","Bumps [certifi](https://github.com/certifi/python-certifi) from 2020.6.20 to 2022.12.7. -
-Commits - -
-
- - -[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=certifi&package-manager=pip&previous-version=2020.6.20&new-version=2022.12.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) - -Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. - -[//]: # (dependabot-automerge-start) -[//]: # (dependabot-automerge-end) - ---- - -
-Dependabot commands and options -
- -You can trigger Dependabot actions by commenting on this PR: -- `@dependabot rebase` will rebase this PR -- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it -- `@dependabot merge` will merge this PR after your CI passes on it -- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it -- `@dependabot cancel merge` will cancel a previously requested merge and block automerging -- `@dependabot reopen` will reopen this PR if it is closed -- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually -- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) -- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language -- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language -- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language -- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language - -You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). - -
","dependabot[bot]","open",,,"dependencies","","","" -173,"build(deps): bump nbconvert from 5.6.1 to 6.5.1 in /311-data/mapillarywrapper","Bumps [nbconvert](https://github.com/jupyter/nbconvert) from 5.6.1 to 6.5.1. -
-Release notes -

Sourced from nbconvert's releases.

-
-

Release 6.5.1

-

No release notes provided.

-

6.5.0

-

What's Changed

- -

New Contributors

- -

Full Changelog: https://github.com/jupyter/nbconvert/compare/6.4.5...6.5

-

6.4.3

-

What's Changed

- -

New Contributors

- -

Full Changelog: https://github.com/jupyter/nbconvert/compare/6.4.2...6.4.3

-

6.4.0

-

What's Changed

- -

New Contributors

- -
-

... (truncated)

-
-
-Commits - -
-
- - -[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=nbconvert&package-manager=pip&previous-version=5.6.1&new-version=6.5.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) - -Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. - -[//]: # (dependabot-automerge-start) -[//]: # (dependabot-automerge-end) - ---- - -
-Dependabot commands and options -
- -You can trigger Dependabot actions by commenting on this PR: -- `@dependabot rebase` will rebase this PR -- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it -- `@dependabot merge` will merge this PR after your CI passes on it -- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it -- `@dependabot cancel merge` will cancel a previously requested merge and block automerging -- `@dependabot reopen` will reopen this PR if it is closed -- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually -- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) -- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language -- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language -- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language -- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language - -You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). - -
","dependabot[bot]","open",,,"dependencies","","","172" -172,"build(deps): bump nbconvert from 5.6.1 to 6.3.0 in /311-data/mapillarywrapper","Bumps [nbconvert](https://github.com/jupyter/nbconvert) from 5.6.1 to 6.3.0. -
-Commits - -
-
- - -[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=nbconvert&package-manager=pip&previous-version=5.6.1&new-version=6.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) - -Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. - -[//]: # (dependabot-automerge-start) -[//]: # (dependabot-automerge-end) - ---- - -
-Dependabot commands and options -
- -You can trigger Dependabot actions by commenting on this PR: -- `@dependabot rebase` will rebase this PR -- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it -- `@dependabot merge` will merge this PR after your CI passes on it -- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it -- `@dependabot cancel merge` will cancel a previously requested merge and block automerging -- `@dependabot reopen` will reopen this PR if it is closed -- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually -- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) -- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language -- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language -- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language -- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language - -You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). - -
","dependabot[bot]","closed",,,"dependencies","","","" -171,"build(deps): bump mistune from 0.8.4 to 2.0.3 in /311-data/mapillarywrapper","Bumps [mistune](https://github.com/lepture/mistune) from 0.8.4 to 2.0.3. -
-Release notes -

Sourced from mistune's releases.

-
-

Version 2.0.2

-

Fix escape_url via lepture/mistune#295

-

Version 2.0.1

-

Fix XSS for image link syntax.

-

Version 2.0.0

-

First release of Mistune v2.

-

Version 2.0.0 RC1

-

In this release, we have a Security Fix for harmful links.

-

Version 2.0.0 Alpha 1

-

This is the first release of v2. An alpha version for users to have a preview of the new mistune.

-
-
-
-Changelog -

Sourced from mistune's changelog.

-
-

Changelog

-

Here is the full history of mistune v2.

-

Version 2.0.4

-

-Released on Jul 15, 2022
-
    -
  • Fix url plugin in &lt;a&gt; tag
  • -
  • Fix * formatting
  • -
-

Version 2.0.3 -

-

Released on Jun 27, 2022

-
    -
  • Fix table plugin
  • -
  • Security fix for CVE-2022-34749
  • -
-

Version 2.0.2

-

-Released on Jan 14, 2022
-

Fix escape_url

-

Version 2.0.1 -

-

Released on Dec 30, 2021

-

XSS fix for image link syntax.

-

Version 2.0.0

-

-Released on Dec 5, 2021
-

This is the first non-alpha release of mistune v2.

-

Version 2.0.0rc1 -

-

Released on Feb 16, 2021

-

Version 2.0.0a6

-

-</tr></table> 
-
-
-

... (truncated)

-
-
-Commits -
    -
  • 3f422f1 Version bump 2.0.3
  • -
  • a6d4321 Fix asteris emphasis regex CVE-2022-34749
  • -
  • 5638e46 Merge pull request #307 from jieter/patch-1
  • -
  • 0eba471 Fix typo in guide.rst
  • -
  • 61e9337 Fix table plugin
  • -
  • 76dec68 Add documentation for renderer heading when TOC enabled
  • -
  • 799cd11 Version bump 2.0.2
  • -
  • babb0cf Merge pull request #295 from dairiki/bug.escape_url
  • -
  • fc2cd53 Make mistune.util.escape_url less aggressive
  • -
  • 3e8d352 Version bump 2.0.1
  • -
  • Additional commits viewable in compare view
  • -
-
-
- - -[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mistune&package-manager=pip&previous-version=0.8.4&new-version=2.0.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) - -Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. - -[//]: # (dependabot-automerge-start) -[//]: # (dependabot-automerge-end) - ---- - -
-Dependabot commands and options -
- -You can trigger Dependabot actions by commenting on this PR: -- `@dependabot rebase` will rebase this PR -- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it -- `@dependabot merge` will merge this PR after your CI passes on it -- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it -- `@dependabot cancel merge` will cancel a previously requested merge and block automerging -- `@dependabot reopen` will reopen this PR if it is closed -- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually -- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) -- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language -- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language -- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language -- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language - -You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). - -
","dependabot[bot]","open",,,"dependencies","","","" -168,"Updating label analysis jupyter notebook with co-occurrence analysis",,"rbianchetti","open",,,"","","","" -167,"Merge pull request #164 from hackforla/160-survey-repo-labels","A python script that gets all the labels used in open issues across a…","codemamma","open",,,"","","","" -166,"build(deps): bump numpy from 1.21.2 to 1.22.0 in /LAANE","Bumps [numpy](https://github.com/numpy/numpy) from 1.21.2 to 1.22.0. -
-Release notes -

Sourced from numpy's releases.

-
-

v1.22.0

-

NumPy 1.22.0 Release Notes

-

NumPy 1.22.0 is a big release featuring the work of 153 contributors -spread over 609 pull requests. There have been many improvements, -highlights are:

-
    -
  • Annotations of the main namespace are essentially complete. Upstream -is a moving target, so there will likely be further improvements, -but the major work is done. This is probably the most user visible -enhancement in this release.
  • -
  • A preliminary version of the proposed Array-API is provided. This is -a step in creating a standard collection of functions that can be -used across application such as CuPy and JAX.
  • -
  • NumPy now has a DLPack backend. DLPack provides a common interchange -format for array (tensor) data.
  • -
  • New methods for quantile, percentile, and related functions. The -new methods provide a complete set of the methods commonly found in -the literature.
  • -
  • A new configurable allocator for use by downstream projects.
  • -
-

These are in addition to the ongoing work to provide SIMD support for -commonly used functions, improvements to F2PY, and better documentation.

-

The Python versions supported in this release are 3.8-3.10, Python 3.7 -has been dropped. Note that 32 bit wheels are only provided for Python -3.8 and 3.9 on Windows, all other wheels are 64 bits on account of -Ubuntu, Fedora, and other Linux distributions dropping 32 bit support. -All 64 bit wheels are also linked with 64 bit integer OpenBLAS, which should fix -the occasional problems encountered by folks using truly huge arrays.

-

Expired deprecations

-

Deprecated numeric style dtype strings have been removed

-

Using the strings "Bytes0", "Datetime64", "Str0", "Uint32", -and "Uint64" as a dtype will now raise a TypeError.

-

(gh-19539)

-

Expired deprecations for loads, ndfromtxt, and mafromtxt in npyio

-

numpy.loads was deprecated in v1.15, with the recommendation that -users use pickle.loads instead. ndfromtxt and mafromtxt were both -deprecated in v1.17 - users should use numpy.genfromtxt instead with -the appropriate value for the usemask parameter.

-

(gh-19615)

- -
-

... (truncated)

-
-
-Commits - -
-
- - -[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=numpy&package-manager=pip&previous-version=1.21.2&new-version=1.22.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) - -Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. - -[//]: # (dependabot-automerge-start) -[//]: # (dependabot-automerge-end) - ---- - -
-Dependabot commands and options -
- -You can trigger Dependabot actions by commenting on this PR: -- `@dependabot rebase` will rebase this PR -- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it -- `@dependabot merge` will merge this PR after your CI passes on it -- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it -- `@dependabot cancel merge` will cancel a previously requested merge and block automerging -- `@dependabot reopen` will reopen this PR if it is closed -- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually -- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) -- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language -- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language -- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language -- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language - -You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). - -
","dependabot[bot]","open",,,"dependencies","","","" -165,"build(deps): bump numpy from 1.18.5 to 1.22.0 in /311-data/mapillarywrapper","Bumps [numpy](https://github.com/numpy/numpy) from 1.18.5 to 1.22.0. -
-Release notes -

Sourced from numpy's releases.

-
-

v1.22.0

-

NumPy 1.22.0 Release Notes

-

NumPy 1.22.0 is a big release featuring the work of 153 contributors -spread over 609 pull requests. There have been many improvements, -highlights are:

-
    -
  • Annotations of the main namespace are essentially complete. Upstream -is a moving target, so there will likely be further improvements, -but the major work is done. This is probably the most user visible -enhancement in this release.
  • -
  • A preliminary version of the proposed Array-API is provided. This is -a step in creating a standard collection of functions that can be -used across application such as CuPy and JAX.
  • -
  • NumPy now has a DLPack backend. DLPack provides a common interchange -format for array (tensor) data.
  • -
  • New methods for quantile, percentile, and related functions. The -new methods provide a complete set of the methods commonly found in -the literature.
  • -
  • A new configurable allocator for use by downstream projects.
  • -
-

These are in addition to the ongoing work to provide SIMD support for -commonly used functions, improvements to F2PY, and better documentation.

-

The Python versions supported in this release are 3.8-3.10, Python 3.7 -has been dropped. Note that 32 bit wheels are only provided for Python -3.8 and 3.9 on Windows, all other wheels are 64 bits on account of -Ubuntu, Fedora, and other Linux distributions dropping 32 bit support. -All 64 bit wheels are also linked with 64 bit integer OpenBLAS, which should fix -the occasional problems encountered by folks using truly huge arrays.

-

Expired deprecations

-

Deprecated numeric style dtype strings have been removed

-

Using the strings "Bytes0", "Datetime64", "Str0", "Uint32", -and "Uint64" as a dtype will now raise a TypeError.

-

(gh-19539)

-

Expired deprecations for loads, ndfromtxt, and mafromtxt in npyio

-

numpy.loads was deprecated in v1.15, with the recommendation that -users use pickle.loads instead. ndfromtxt and mafromtxt were both -deprecated in v1.17 - users should use numpy.genfromtxt instead with -the appropriate value for the usemask parameter.

-

(gh-19615)

- -
-

... (truncated)

-
-
-Commits - -
-
- - -[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=numpy&package-manager=pip&previous-version=1.18.5&new-version=1.22.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) - -Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. - -[//]: # (dependabot-automerge-start) -[//]: # (dependabot-automerge-end) - ---- - -
-Dependabot commands and options -
- -You can trigger Dependabot actions by commenting on this PR: -- `@dependabot rebase` will rebase this PR -- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it -- `@dependabot merge` will merge this PR after your CI passes on it -- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it -- `@dependabot cancel merge` will cancel a previously requested merge and block automerging -- `@dependabot reopen` will reopen this PR if it is closed -- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually -- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) -- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language -- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language -- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language -- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language - -You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). - -
","dependabot[bot]","open",,,"dependencies","","","142" -164,"A python script that gets all the labels used in open issues across a…","…n organization(s) via GitHub API","rbianchetti","closed",,,"","","","26" -159,"Linking latest pedestrian safety report presentation","This replaces a previous powerpoint presentation stored in the repo with links to the current report and presentation saved in Hack for LA's shared google drive.","henrykaplan","open",,,"","","akhaleghi","" -158,"Data Science logo links and image files","The latest version of the Hack for LA data science logo is being worked on in Google Drive. I've updated the directory in the git repo with links to the latest and with png and svg versions of the latest logo files.","henrykaplan","closed",,,"","","akhaleghi","140" -142,"build(deps): bump numpy from 1.18.5 to 1.21.0 in /311-data/mapillarywrapper","Bumps [numpy](https://github.com/numpy/numpy) from 1.18.5 to 1.21.0. -
-Release notes -

Sourced from numpy's releases.

-
-

v1.21.0

-

NumPy 1.21.0 Release Notes

-

The NumPy 1.21.0 release highlights are

-
    -
  • continued SIMD work covering more functions and platforms,
  • -
  • initial work on the new dtype infrastructure and casting,
  • -
  • universal2 wheels for Python 3.8 and Python 3.9 on Mac,
  • -
  • improved documentation,
  • -
  • improved annotations,
  • -
  • new PCG64DXSM bitgenerator for random numbers.
  • -
-

In addition there are the usual large number of bug fixes and other -improvements.

-

The Python versions supported for this release are 3.7-3.9. Official -support for Python 3.10 will be added when it is released.

-

:warning: Warning: there are unresolved problems compiling NumPy 1.21.0 with gcc-11.1 .

-
    -
  • Optimization level -O3 results in many wrong warnings when running the tests.
  • -
  • On some hardware NumPy will hang in an infinite loop.
  • -
-

New functions

-

Add PCG64DXSM BitGenerator

-

Uses of the PCG64 BitGenerator in a massively-parallel context have -been shown to have statistical weaknesses that were not apparent at the -first release in numpy 1.17. Most users will never observe this weakness -and are safe to continue to use PCG64. We have introduced a new -PCG64DXSM BitGenerator that will eventually become the new default -BitGenerator implementation used by default_rng in future releases. -PCG64DXSM solves the statistical weakness while preserving the -performance and the features of PCG64.

-

See upgrading-pcg64 for more details.

-

(gh-18906)

-

Expired deprecations

-
    -
  • The shape argument numpy.unravel_index cannot be -passed as dims keyword argument anymore. (Was deprecated in NumPy -1.16.)
  • -
- -
-

... (truncated)

-
-
-Commits -
    -
  • b235f9e Merge pull request #19283 from charris/prepare-1.21.0-release
  • -
  • 34aebc2 MAINT: Update 1.21.0-notes.rst
  • -
  • 493b64b MAINT: Update 1.21.0-changelog.rst
  • -
  • 07d7e72 MAINT: Remove accidentally created directory.
  • -
  • 032fca5 Merge pull request #19280 from charris/backport-19277
  • -
  • 7d25b81 BUG: Fix refcount leak in ResultType
  • -
  • fa5754e BUG: Add missing DECREF in new path
  • -
  • 61127bb Merge pull request #19268 from charris/backport-19264
  • -
  • 143d45f Merge pull request #19269 from charris/backport-19228
  • -
  • d80e473 BUG: Removed typing for == and != in dtypes
  • -
  • Additional commits viewable in compare view
  • -
-
-
- - -[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=numpy&package-manager=pip&previous-version=1.18.5&new-version=1.21.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) - -Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. - -[//]: # (dependabot-automerge-start) -[//]: # (dependabot-automerge-end) - ---- - -
-Dependabot commands and options -
- -You can trigger Dependabot actions by commenting on this PR: -- `@dependabot rebase` will rebase this PR -- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it -- `@dependabot merge` will merge this PR after your CI passes on it -- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it -- `@dependabot cancel merge` will cancel a previously requested merge and block automerging -- `@dependabot reopen` will reopen this PR if it is closed -- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually -- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) -- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language -- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language -- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language -- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language - -You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). - -
","dependabot[bot]","closed",,,"dependencies","","","" -139,"Removed spaces from all filenames in pedestrian data directory","Changed filenames of all output files and graphics. No changes to file contents — these are final graphics and reports, and aren't aren't referenced by file path anywhere.","henrykaplan","closed",,,"","","","" -132,"Pedestrian safety","Added readme documentation for downloading and organizing data, running Jupyter notebooks, and reorganized analysis files.","henrykaplan","closed",,,"","","","" -123,"Create a new issue template for epics",,"akhaleghi","closed",,,"","","","120" -122,"Pedestrian safety draft visualizations and presentation","Further simplified and clarified maps and traffic accident charts, added these new charts and related discussion to the presentation file.","henrykaplan","closed",,,"","","AlbertUlysses","" -117,"Pedestrian safety","- Add optional parameter to mapillary wrapper client to allow data requests of recent data only, to quickly update existing datasets. -- Better maps and visualizations. -- Use DBScan clustering algorithm to find clusters of traffic accidents (simplest way to roughly group accidents at specific intersections, as the accident data is mostly based around intersections). -- Updated report ppt for August 5 presentation to data science group","henrykaplan","closed",,,"","","","" -116,"webscraping folder initial commit","[Edited] - -This PR is a stand alone folder added to the 311-data folder. It contains a webscraping script for the tech stacks of each NC survey. See issue [44](https://github.com/hackforla/data-science/issues/44). The readme contains a link to the google sheets table of the tech used for each website.","rajindermavi","closed",,,"","","","" -115,"docs","updated readme to include sample queries that LAANE is interested in doing against the database","AlbertUlysses","closed",,,"","","","" -114,"docs","Update README/todos","AlbertUlysses","closed",,,"","","","" -113,"docs","Updated misc docs","AlbertUlysses","closed",,,"","","","" -112,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /LAANE","Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.24.3 to 1.26.5. -
-Release notes -

Sourced from urllib3's releases.

-
-

1.26.5

-

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

-
    -
  • Fixed deprecation warnings emitted in Python 3.10.
  • -
  • Updated vendored six library to 1.16.0.
  • -
  • Improved performance of URL parser when splitting the authority component.
  • -
-

If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors

-

1.26.4

-

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

-
    -
  • Changed behavior of the default SSLContext when connecting to HTTPS proxy during HTTPS requests. The default SSLContext now sets check_hostname=True.
  • -
-

If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors

-

1.26.3

-

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

-
    -
  • -

    Fixed bytes and string comparison issue with headers (Pull #2141)

    -
  • -
  • -

    Changed ProxySchemeUnknown error message to be more actionable if the user supplies a proxy URL without a scheme (Pull #2107)

    -
  • -
-

If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors

-

1.26.2

-

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

-
    -
  • Fixed an issue where wrap_socket and CERT_REQUIRED wouldn't be imported properly on Python 2.7.8 and earlier (Pull #2052)
  • -
-

1.26.1

-

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

-
    -
  • Fixed an issue where two User-Agent headers would be sent if a User-Agent header key is passed as bytes (Pull #2047)
  • -
-

1.26.0

-

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

-
    -
  • -

    Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)

    -
  • -
  • -

    Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that -still wish to use TLS earlier than 1.2 without a deprecation warning -should opt-in explicitly by setting ssl_version=ssl.PROTOCOL_TLSv1_1 (Pull #2002) -Starting in urllib3 v2.0: Connections that receive a DeprecationWarning will fail

    -
  • -
  • -

    Deprecated Retry options Retry.DEFAULT_METHOD_WHITELIST, Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST -and Retry(method_whitelist=...) in favor of Retry.DEFAULT_ALLOWED_METHODS, -Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT, and Retry(allowed_methods=...) -(Pull #2000) Starting in urllib3 v2.0: Deprecated options will be removed

    -
  • -
- -
-

... (truncated)

-
-
-Changelog -

Sourced from urllib3's changelog.

-
-

1.26.5 (2021-05-26)

-
    -
  • Fixed deprecation warnings emitted in Python 3.10.
  • -
  • Updated vendored six library to 1.16.0.
  • -
  • Improved performance of URL parser when splitting -the authority component.
  • -
-

1.26.4 (2021-03-15)

-
    -
  • Changed behavior of the default SSLContext when connecting to HTTPS proxy -during HTTPS requests. The default SSLContext now sets check_hostname=True.
  • -
-

1.26.3 (2021-01-26)

-
    -
  • -

    Fixed bytes and string comparison issue with headers (Pull #2141)

    -
  • -
  • -

    Changed ProxySchemeUnknown error message to be -more actionable if the user supplies a proxy URL without -a scheme. (Pull #2107)

    -
  • -
-

1.26.2 (2020-11-12)

-
    -
  • Fixed an issue where wrap_socket and CERT_REQUIRED wouldn't -be imported properly on Python 2.7.8 and earlier (Pull #2052)
  • -
-

1.26.1 (2020-11-11)

-
    -
  • Fixed an issue where two User-Agent headers would be sent if a -User-Agent header key is passed as bytes (Pull #2047)
  • -
-

1.26.0 (2020-11-10)

-
    -
  • -

    NOTE: urllib3 v2.0 will drop support for Python 2. -Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>_.

    -
  • -
  • -

    Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)

    -
  • -
  • -

    Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that -still wish to use TLS earlier than 1.2 without a deprecation warning

    -
  • -
- -
-

... (truncated)

-
-
-Commits -
    -
  • d161647 Release 1.26.5
  • -
  • 2d4a3fe Improve performance of sub-authority splitting in URL
  • -
  • 2698537 Update vendored six to 1.16.0
  • -
  • 07bed79 Fix deprecation warnings for Python 3.10 ssl module
  • -
  • d725a9b Add Python 3.10 to GitHub Actions
  • -
  • 339ad34 Use pytest==6.2.4 on Python 3.10+
  • -
  • f271c9c Apply latest Black formatting
  • -
  • 1884878 [1.26] Properly proxy EOF on the SSLTransport test suite
  • -
  • a891304 Release 1.26.4
  • -
  • 8d65ea1 Merge pull request from GHSA-5phf-pp7p-vc2r
  • -
  • Additional commits viewable in compare view
  • -
-
-
- - -[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=urllib3&package-manager=pip&previous-version=1.24.3&new-version=1.26.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) - -Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. - -[//]: # (dependabot-automerge-start) -[//]: # (dependabot-automerge-end) - ---- - -
-Dependabot commands and options -
- -You can trigger Dependabot actions by commenting on this PR: -- `@dependabot rebase` will rebase this PR -- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it -- `@dependabot merge` will merge this PR after your CI passes on it -- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it -- `@dependabot cancel merge` will cancel a previously requested merge and block automerging -- `@dependabot reopen` will reopen this PR if it is closed -- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually -- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) -- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language -- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language -- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language -- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language - -You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). - -
","dependabot[bot]","closed",,,"dependencies","","","" -111,"Docs!","Updated docs like readme and add extra notes to some other files","AlbertUlysses","closed",,,"","","","" -105,"debug","debugged some code that was entering incorrect addresses","AlbertUlysses","closed",,,"","","","" -104,"docs/clean up","updated README and removed some old files.","AlbertUlysses","closed",,,"","","","" -103,"feat: assessor script","Last script, passes tests refactored a bit","AlbertUlysses","closed",,,"","","","" -102,"docs","Update Readme -removed a lot of personal notes aimed for me during script writing","AlbertUlysses","closed",,,"","","","" -101,"debug categorically inelligible","debug categorically inelligibl data","AlbertUlysses","closed",,,"","","","" -100,"New scripts + debug","added some code that debugs the ofs and luxly datasets. -Added a new script that handles warnings and citations.","AlbertUlysses","closed",,,"","","","" -99,"bugfix","fixed bugs across 4 files that dealth with addresses that returned 0 for zipcode instead of a 5 number digit","AlbertUlysses","closed",,,"","","","" -98,"feat: new script for warning","This script is for warning files that don't have only addresses and dates.","AlbertUlysses","closed",,,"","","","" -97,"Two commits","First commit fixes the warning script file. -The second commit adds an extra line describe what the script is for. -","AlbertUlysses","closed",,,"","","","" -96,"feat: new processesing script","script for airbnb reviews is complete.","AlbertUlysses","closed",,,"","","","" -95,"feat: airbnb dataset script","airbnb script that uploads listings and host information ","AlbertUlysses","closed",,,"","","","" -93,"debug one fine stay scripts","debug one fine stay scripts for address2 errors","AlbertUlysses","closed",,,"","","","" -92,"debug","debugged the data from one fine stay.","AlbertUlysses","closed",,,"","","","" -91,"debug","Did some debugging for luxly platform script","AlbertUlysses","closed",,,"","","","" -90,"fixbug in hsodenials","hsodenials bugs are remved, hsodenials script is good to go.","AlbertUlysses","closed",,,"","","","" -89,"debug","Debugged the hso_registrant enteries.","AlbertUlysses","closed",,,"","","","" -88,"bugfix: fixed exempt script","fixed exempt table to match the rest of the database","AlbertUlysses","closed",,,"","","","" -87,"fixbug: fixed bug in categorically inelligible","fixed categoically ineligible bug","AlbertUlysses","closed",,,"","","","" -86,"bug fix: fixed bug for complaints data","fixed complaints data to not have nulls and to not allow any state that don't use two letter abbreviation. ","AlbertUlysses","closed",,,"","","","" -85,"refactor/debug: debugging each script","Refactored tot script to make sure it is uploading to sqlite correctly.","AlbertUlysses","closed",,,"","","","" -84,"re-refactor normalize address","normalize address wrapper returns none as default again because that's how the library handle it, shouldn't return two different options.","AlbertUlysses","closed",,,"","","","" -83,"refactored","refactoring some transformation files to fix bug","AlbertUlysses","closed",,,"","","","" -82,"feat: hso_registrant script","HSO_Registrant script is complete. -","AlbertUlysses","closed",,,"","","","" -81,"refactor: no code change files moved","I moved some files around to better reflect where they belong. -For exapmle, assessor table file will be move to processingscripts folder because the code is unique to the assessor dataset. 
-However, normalize_address_wrapper will stay in the transformations folder because multiple scripts use this. -Asssesor file (and builds) will now have the custom code and later include the processing code as well.","AlbertUlysses","closed",,,"","","","" -79,"refactor: add a new file to handle multiple files","Remove old code that does multiple files for a job and include a new module that handles that.","AlbertUlysses","closed",,,"","","","" -78,"feat: new script","Script that enters one fine stay data into platform table but has a different column layout then the other one fine stay sheets.","AlbertUlysses","closed",,,"","","","" -77,"refactor: include steps for entire folder","Added some new code that helps with multiple fies in a folder. This should be moved into it's own module in the future.","AlbertUlysses","closed",,,"","","","" -76,"refactor: move scripts to new folder","move all scripting files to processingscript folder and leave helper functions in the transformations folder.","AlbertUlysses","closed",,,"","","","" -75,"feat: platform one fine stay","Adding the custom script for uploading one fine stay data into platform database.","AlbertUlysses","closed",,,"","","","" -74,"refactor: refactor platform luxly","refactor the platform luxly file to make it more readable and closer allign with wemake style guide.","AlbertUlysses","closed",,,"","","","" -73,"docs: update license","updated the license to reflect Hack For LA's best practices. ","AlbertUlysses","closed",,,"","","","" -72,"feat: platform table luxly files","Completed script for loading luxly files into the platform database.","AlbertUlysses","closed",,,"","","","" -71,"feat: add hso revoked table","The script for inserting HSO Revoked data is complete.","AlbertUlysses","closed",,,"","","","" -70,"feat: exempt processing script","The processing script for the ""exempt"" dataset is done.","AlbertUlysses","closed",,,"","","","" -69,"feat: noncompliant script","Add script that inserts noncompliant data into database.","AlbertUlysses","closed",,,"","","","" -68,"feat: categorically inelligible","added a new script for the categorically ineligible dataset.","AlbertUlysses","closed",,,"","","","" -67,"feat: add complaints script","Added a new script that inputs Complaints data into the database. -","AlbertUlysses","closed",,,"","","","" -64,"feat: tot insert file","New insert script complete. -","AlbertUlysses","closed",,,"","","","" -63,"feat: add new abstraction function","refactored some code and created a new abstraction function for inserting data. ","AlbertUlysses","closed",,,"","","","" -62,"quick fix","removed absolute path : quick fix","AlbertUlysses","closed",,,"","","","" -61,"feat: hso_denials insert function","Created a new file for hso_denial table, Added test for the custom functions. Added the code to insert the hso_denials data into the db. -1st dataset that's completely done -Need to go back to refactor the code because it's ugly","AlbertUlysses","closed",,,"","","","" -60,"build(deps): bump jupyterlab from 2.1.5 to 2.2.10 in /311-data/mapillarywrapper","Bumps [jupyterlab](https://github.com/jupyterlab/jupyterlab) from 2.1.5 to 2.2.10. -
-Commits -
    -
  • 87fff87 New version
  • -
  • 8b88bd1 update canvas version to allow build
  • -
  • d5e3649 Merge pull request from GHSA-4952-p58q-6crx
  • -
  • 9a8dadf Publish 2.2.9
  • -
  • 51fe0db bump version
  • -
  • e8e144b New version
  • -
  • a67a68e Merge pull request #9211 from meeseeksmachine/auto-backport-of-pr-9189-on-2.2.x
  • -
  • 1c7d14e Merge pull request #9173 from datalayer-contrib/2-2-x/revert-perf
  • -
  • b8c5203 Backport PR #9189: Update session and kernel manager data only if there was a...
  • -
  • bbc2959 Merge pull request #9168 from karlaspuldaro/notebook-kernel-display-2.2.x
  • -
  • Additional commits viewable in compare view
  • -
-
-
- - -[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=jupyterlab&package-manager=pip&previous-version=2.1.5&new-version=2.2.10)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) - -Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. - -[//]: # (dependabot-automerge-start) -[//]: # (dependabot-automerge-end) - ---- - -
-Dependabot commands and options -
- -You can trigger Dependabot actions by commenting on this PR: -- `@dependabot rebase` will rebase this PR -- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it -- `@dependabot merge` will merge this PR after your CI passes on it -- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it -- `@dependabot cancel merge` will cancel a previously requested merge and block automerging -- `@dependabot reopen` will reopen this PR if it is closed -- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually -- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) -- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language -- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language -- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language -- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language - -You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). - -
","dependabot[bot]","closed",,,"dependencies","","","" -58,"docs: adding COPYING file","removed some old jupyter files and added a license file which is called COPYING per GNU's suggestion.","AlbertUlysses","closed",,,"","","","" -57,"Feat: add Airbnb tables","added airbnb tables and updated SQL Alchemy relationships.","AlbertUlysses","closed",,,"","","","" -56,"feat: add SQL Alchemy models/Database","added SQL Alchemy Models and the Database code. ","AlbertUlysses","closed",,,"","","","" -55,"docs: rewrote some docstrings etc","updated some doc strings in files to better reflect the intent.","AlbertUlysses","closed",,,"","","","" -54,"feat: new code for date entries","I added new code and tests for formatting date columns. ","AlbertUlysses","closed",,,"","","","" -53,"refactor: update assessor table","Did some refactoring that addressed these issues: - -- Rewrote the code to match the WeMake style guide. -- The functions are faster, and the memory is more efficient by ten times on average. -- The functions are pure now, with no added consequence to the DataFrames or Series passed into them. -All tests are still passing.","AlbertUlysses","closed",,,"","","","" -52,"docs: re-wrote some of the normalize_address information","Docs improvement for normailze_address","AlbertUlysses","closed",,,"","","","" -51,"refactored transformation scripts","refactored a lot and added extra notes in read me ","AlbertUlysses","closed",,,"","","","" -50,"tot update/completion","ToT table is done. Refactored it a bit - still passing test -","AlbertUlysses","closed",,,"","","","" -49,"work on tot table","WIP - started work on TOT table and tests I will finish in tomorrow and start on a new table.","AlbertUlysses","closed",,,"","","","" -48,"Add expemtion table transformations","I added the transformation functions for exempt table with their tests, renamed the main folder.","AlbertUlysses","closed",,,"","","","" -47,"updating folder layout and adding transformation scripts with tests","introduces the first of a few scripts to the project along with some tests. Needs refactoring but all tests are passing. - -","AlbertUlysses","closed",,,"","","","" -43,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /311-data/mapillarywrapper","Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.24.3 to 1.26.5. -
-Release notes -

Sourced from urllib3's releases.

-
-

1.26.5

-

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

-
    -
  • Fixed deprecation warnings emitted in Python 3.10.
  • -
  • Updated vendored six library to 1.16.0.
  • -
  • Improved performance of URL parser when splitting the authority component.
  • -
-

If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors

-

1.26.4

-

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

-
    -
  • Changed behavior of the default SSLContext when connecting to HTTPS proxy during HTTPS requests. The default SSLContext now sets check_hostname=True.
  • -
-

If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors

-

1.26.3

-

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

-
    -
  • -

    Fixed bytes and string comparison issue with headers (Pull #2141)

    -
  • -
  • -

    Changed ProxySchemeUnknown error message to be more actionable if the user supplies a proxy URL without a scheme (Pull #2107)

    -
  • -
-

If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors

-

1.26.2

-

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

-
    -
  • Fixed an issue where wrap_socket and CERT_REQUIRED wouldn't be imported properly on Python 2.7.8 and earlier (Pull #2052)
  • -
-

1.26.1

-

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

-
    -
  • Fixed an issue where two User-Agent headers would be sent if a User-Agent header key is passed as bytes (Pull #2047)
  • -
-

1.26.0

-

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

-
    -
  • -

    Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)

    -
  • -
  • -

    Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that -still wish to use TLS earlier than 1.2 without a deprecation warning -should opt-in explicitly by setting ssl_version=ssl.PROTOCOL_TLSv1_1 (Pull #2002) -Starting in urllib3 v2.0: Connections that receive a DeprecationWarning will fail

    -
  • -
  • -

    Deprecated Retry options Retry.DEFAULT_METHOD_WHITELIST, Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST -and Retry(method_whitelist=...) in favor of Retry.DEFAULT_ALLOWED_METHODS, -Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT, and Retry(allowed_methods=...) -(Pull #2000) Starting in urllib3 v2.0: Deprecated options will be removed

    -
  • -
- -
-

... (truncated)

-
-
-Changelog -

Sourced from urllib3's changelog.

-
-

1.26.5 (2021-05-26)

-
    -
  • Fixed deprecation warnings emitted in Python 3.10.
  • -
  • Updated vendored six library to 1.16.0.
  • -
  • Improved performance of URL parser when splitting -the authority component.
  • -
-

1.26.4 (2021-03-15)

-
    -
  • Changed behavior of the default SSLContext when connecting to HTTPS proxy -during HTTPS requests. The default SSLContext now sets check_hostname=True.
  • -
-

1.26.3 (2021-01-26)

-
    -
  • -

    Fixed bytes and string comparison issue with headers (Pull #2141)

    -
  • -
  • -

    Changed ProxySchemeUnknown error message to be -more actionable if the user supplies a proxy URL without -a scheme. (Pull #2107)

    -
  • -
-

1.26.2 (2020-11-12)

-
    -
  • Fixed an issue where wrap_socket and CERT_REQUIRED wouldn't -be imported properly on Python 2.7.8 and earlier (Pull #2052)
  • -
-

1.26.1 (2020-11-11)

-
    -
  • Fixed an issue where two User-Agent headers would be sent if a -User-Agent header key is passed as bytes (Pull #2047)
  • -
-

1.26.0 (2020-11-10)

-
    -
  • -

    NOTE: urllib3 v2.0 will drop support for Python 2. -Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>_.

    -
  • -
  • -

    Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)

    -
  • -
  • -

    Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that -still wish to use TLS earlier than 1.2 without a deprecation warning

    -
  • -
- -
-

... (truncated)

-
-
-Commits -
    -
  • d161647 Release 1.26.5
  • -
  • 2d4a3fe Improve performance of sub-authority splitting in URL
  • -
  • 2698537 Update vendored six to 1.16.0
  • -
  • 07bed79 Fix deprecation warnings for Python 3.10 ssl module
  • -
  • d725a9b Add Python 3.10 to GitHub Actions
  • -
  • 339ad34 Use pytest==6.2.4 on Python 3.10+
  • -
  • f271c9c Apply latest Black formatting
  • -
  • 1884878 [1.26] Properly proxy EOF on the SSLTransport test suite
  • -
  • a891304 Release 1.26.4
  • -
  • 8d65ea1 Merge pull request from GHSA-5phf-pp7p-vc2r
  • -
  • Additional commits viewable in compare view
  • -
-
-
- - -[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=urllib3&package-manager=pip&previous-version=1.24.3&new-version=1.26.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) - -Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. - -[//]: # (dependabot-automerge-start) -[//]: # (dependabot-automerge-end) - ---- - -
-Dependabot commands and options -
- -You can trigger Dependabot actions by commenting on this PR: -- `@dependabot rebase` will rebase this PR -- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it -- `@dependabot merge` will merge this PR after your CI passes on it -- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it -- `@dependabot cancel merge` will cancel a previously requested merge and block automerging -- `@dependabot reopen` will reopen this PR if it is closed -- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually -- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) -- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) -- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language -- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language -- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language -- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language - -You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). - -
","dependabot[bot]","closed",,,"dependencies","","","" -42,"Update Pedestrian Data folder","Pedestrian Data folder including the relevant notebooks and visualizations.","dplem","closed",,,"","","","" -41,"Push request redo preprocessing files","","KarinaLopez19","closed",,,"","","AlbertUlysses","" -40,"Extend Mapillary API wrapper","These are new features for the python wrapper for Mapillary's API to allow it to: - -- Make requests of other layers of Mapillary data beyond traffic signs - -- Store photo image IDs associated with each item on map - -- Parse Mapillary's latitude/longitude string format - -- Give feedback while downloading data and timeout after 5 minutes, to prevent downloads from failing silently","henrykaplan","closed",,,"","henrykaplan","dplem,AlbertUlysses","" -39,"add bbox function","Created bbox function to help create a boundary box for cleaning data. Added one test to ensure any refactoring in the future still returns the correct expected results. ","AlbertUlysses","closed",,,"","","","" -38,"this is a test","","KarinaLopez19","closed",,,"","","","" -37,"add new folder with new functions to help with clean up","Adding the folder for airbnb listing and first function for helper functions that will be used for cleaning the data -","AlbertUlysses","closed",,,"","","","" -35,"clean up jupyter notebook","cleaned up the jupyter notebook to make it more user friendly","AlbertUlysses","closed",,,"","","","" -34,"chore: clean up modules","used black to clean up modules and removed commented code that wasn't being used.","AlbertUlysses","closed",,,"","","","" +PR Number,Title,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues +205,"Added data loading and cleaning Jupyter notebook","mru-hub","closed",,,"","","","" +201,"feat: City of Los Angeles Evictions #179","rahul897","open",,,"","","","" +199,"177 create 311 data csv files that can be accessed through a jupyter notebook","mru-hub","closed",,,"","","salice","" +197,"N2020h issues to csv","n2020h","open",,,"","n2020h","","" +196,"186 Adding Crime Data","dolla24","closed",,,"","dolla24","","" +195,"fix name of image to work with Windows","salice","closed",,,"","","","" +192,"Create issues-to-csv.yml","n2020h","closed",,,"","","","" +188,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","JessicaLucindaCheng","closed",,,"","","","5127, 2607, 6139, 296" +187,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","JessicaLucindaCheng","closed",,,"","","","" +186,"Lalla la crime analysis 2","Lalla22","open",,,"","","salice","" +185,"Create README.md","Lalla22","closed",,,"","","","" +184,"git c","Lalla22","closed",,,"","","","" +176,"build(deps): bump cryptography from 3.3.2 to 39.0.1 in /311-data/mapillarywrapper","dependabot[bot]","open",,,"dependencies","","","" +175,"build(deps): bump certifi from 2021.5.30 to 2022.12.7 in /LAANE","dependabot[bot]","open",,,"dependencies","","","" +174,"build(deps): bump certifi from 2020.6.20 to 2022.12.7 in /311-data/mapillarywrapper","dependabot[bot]","open",,,"dependencies","","","" +173,"build(deps): bump nbconvert from 5.6.1 to 6.5.1 in /311-data/mapillarywrapper","dependabot[bot]","open",,,"dependencies","","","172" +172,"build(deps): bump nbconvert from 5.6.1 to 6.3.0 in /311-data/mapillarywrapper","dependabot[bot]","closed",,,"dependencies","","","" +171,"build(deps): bump mistune from 0.8.4 to 2.0.3 in /311-data/mapillarywrapper","dependabot[bot]","open",,,"dependencies","","","" +168,"Updating 
label analysis jupyter notebook with co-occurrence analysis","rbianchetti","open",,,"","","","" +167,"Merge pull request #164 from hackforla/160-survey-repo-labels","codemamma","open",,,"","","","" +166,"build(deps): bump numpy from 1.21.2 to 1.22.0 in /LAANE","dependabot[bot]","open",,,"dependencies","","","" +165,"build(deps): bump numpy from 1.18.5 to 1.22.0 in /311-data/mapillarywrapper","dependabot[bot]","open",,,"dependencies","","","142" +164,"A python script that gets all the labels used in open issues across a…","rbianchetti","closed",,,"","","","26" +159,"Linking latest pedestrian safety report presentation","henrykaplan","open",,,"","","akhaleghi","" +158,"Data Science logo links and image files","henrykaplan","closed",,,"","","akhaleghi","140" +142,"build(deps): bump numpy from 1.18.5 to 1.21.0 in /311-data/mapillarywrapper","dependabot[bot]","closed",,,"dependencies","","","" +139,"Removed spaces from all filenames in pedestrian data directory","henrykaplan","closed",,,"","","","" +132,"Pedestrian safety","henrykaplan","closed",,,"","","","" +123,"Create a new issue template for epics","akhaleghi","closed",,,"","","","120" +122,"Pedestrian safety draft visualizations and presentation","henrykaplan","closed",,,"","","AlbertUlysses","" +117,"Pedestrian safety","henrykaplan","closed",,,"","","","" +116,"webscraping folder initial commit","rajindermavi","closed",,,"","","","" +115,"docs","AlbertUlysses","closed",,,"","","","" +114,"docs","AlbertUlysses","closed",,,"","","","" +113,"docs","AlbertUlysses","closed",,,"","","","" +112,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /LAANE","dependabot[bot]","closed",,,"dependencies","","","" +111,"Docs!","AlbertUlysses","closed",,,"","","","" +105,"debug","AlbertUlysses","closed",,,"","","","" +104,"docs/clean up","AlbertUlysses","closed",,,"","","","" +103,"feat: assessor script","AlbertUlysses","closed",,,"","","","" +102,"docs","AlbertUlysses","closed",,,"","","","" +101,"debug categorically inelligible","AlbertUlysses","closed",,,"","","","" +100,"New scripts + debug","AlbertUlysses","closed",,,"","","","" +99,"bugfix","AlbertUlysses","closed",,,"","","","" +98,"feat: new script for warning","AlbertUlysses","closed",,,"","","","" +97,"Two commits","AlbertUlysses","closed",,,"","","","" +96,"feat: new processesing script","AlbertUlysses","closed",,,"","","","" +95,"feat: airbnb dataset script","AlbertUlysses","closed",,,"","","","" +93,"debug one fine stay scripts","AlbertUlysses","closed",,,"","","","" +92,"debug","AlbertUlysses","closed",,,"","","","" +91,"debug","AlbertUlysses","closed",,,"","","","" +90,"fixbug in hsodenials","AlbertUlysses","closed",,,"","","","" +89,"debug","AlbertUlysses","closed",,,"","","","" +88,"bugfix: fixed exempt script","AlbertUlysses","closed",,,"","","","" +87,"fixbug: fixed bug in categorically inelligible","AlbertUlysses","closed",,,"","","","" +86,"bug fix: fixed bug for complaints data","AlbertUlysses","closed",,,"","","","" +85,"refactor/debug: debugging each script","AlbertUlysses","closed",,,"","","","" +84,"re-refactor normalize address","AlbertUlysses","closed",,,"","","","" +83,"refactored","AlbertUlysses","closed",,,"","","","" +82,"feat: hso_registrant script","AlbertUlysses","closed",,,"","","","" +81,"refactor: no code change files moved","AlbertUlysses","closed",,,"","","","" +79,"refactor: add a new file to handle multiple files","AlbertUlysses","closed",,,"","","","" +78,"feat: new script","AlbertUlysses","closed",,,"","","","" +77,"refactor: include steps for entire 
folder","AlbertUlysses","closed",,,"","","","" +76,"refactor: move scripts to new folder","AlbertUlysses","closed",,,"","","","" +75,"feat: platform one fine stay","AlbertUlysses","closed",,,"","","","" +74,"refactor: refactor platform luxly","AlbertUlysses","closed",,,"","","","" +73,"docs: update license","AlbertUlysses","closed",,,"","","","" +72,"feat: platform table luxly files","AlbertUlysses","closed",,,"","","","" +71,"feat: add hso revoked table","AlbertUlysses","closed",,,"","","","" +70,"feat: exempt processing script","AlbertUlysses","closed",,,"","","","" +69,"feat: noncompliant script","AlbertUlysses","closed",,,"","","","" +68,"feat: categorically inelligible","AlbertUlysses","closed",,,"","","","" +67,"feat: add complaints script","AlbertUlysses","closed",,,"","","","" +64,"feat: tot insert file","AlbertUlysses","closed",,,"","","","" +63,"feat: add new abstraction function","AlbertUlysses","closed",,,"","","","" +62,"quick fix","AlbertUlysses","closed",,,"","","","" +61,"feat: hso_denials insert function","AlbertUlysses","closed",,,"","","","" +60,"build(deps): bump jupyterlab from 2.1.5 to 2.2.10 in /311-data/mapillarywrapper","dependabot[bot]","closed",,,"dependencies","","","" +58,"docs: adding COPYING file","AlbertUlysses","closed",,,"","","","" +57,"Feat: add Airbnb tables","AlbertUlysses","closed",,,"","","","" +56,"feat: add SQL Alchemy models/Database","AlbertUlysses","closed",,,"","","","" +55,"docs: rewrote some docstrings etc","AlbertUlysses","closed",,,"","","","" +54,"feat: new code for date entries","AlbertUlysses","closed",,,"","","","" +53,"refactor: update assessor table","AlbertUlysses","closed",,,"","","","" +52,"docs: re-wrote some of the normalize_address information","AlbertUlysses","closed",,,"","","","" +51,"refactored transformation scripts","AlbertUlysses","closed",,,"","","","" +50,"tot update/completion","AlbertUlysses","closed",,,"","","","" +49,"work on tot table","AlbertUlysses","closed",,,"","","","" +48,"Add expemtion table transformations","AlbertUlysses","closed",,,"","","","" +47,"updating folder layout and adding transformation scripts with tests","AlbertUlysses","closed",,,"","","","" +43,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /311-data/mapillarywrapper","dependabot[bot]","closed",,,"dependencies","","","" +42,"Update Pedestrian Data folder","dplem","closed",,,"","","","" +41,"Push request redo preprocessing files","KarinaLopez19","closed",,,"","","AlbertUlysses","" +40,"Extend Mapillary API wrapper","henrykaplan","closed",,,"","henrykaplan","dplem,AlbertUlysses","" +39,"add bbox function","AlbertUlysses","closed",,,"","","","" +38,"this is a test","KarinaLopez19","closed",,,"","","","" +37,"add new folder with new functions to help with clean up","AlbertUlysses","closed",,,"","","","" +35,"clean up jupyter notebook","AlbertUlysses","closed",,,"","","","" +34,"chore: clean up modules","AlbertUlysses","closed",,,"","","","" From 314b51821da85a3b1d650c07a4f752affc58e115 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 12 Aug 2024 17:27:13 -0400 Subject: [PATCH 43/49] Update pull_requests_to_csv.yml removed fields --- .github/workflows/pull_requests_to_csv.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index cfc6fac..bacab29 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -48,7 +48,7 @@ jobs: # Generate pull 
requests CSV including linked issues - name: Generate pull requests CSV including linked issues run: | - echo "PR Number,Title,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues" > pull_requests.csv + echo "PR Number,Title,Author,State,Labels,Assignees,Reviewers,Linked Issues" > pull_requests.csv for pr_number in $(jq -r '.[].number' pulls.json); do timeline_file="timeline_$pr_number.json" @@ -66,8 +66,6 @@ jobs: .title, .user.login, .state, - .commits, - .changed_files, (.labels | map(.name) | join(",")), (.assignees | map(.login) | join(",")), (.requested_reviewers | map(.login) | join(",")), From 75b87d515b634a3eb8b45ce64452ce82aa1c9b42 Mon Sep 17 00:00:00 2001 From: Automated Date: Mon, 12 Aug 2024 21:28:03 +0000 Subject: [PATCH 44/49] Latest pull requests data: Mon Aug 12 21:28:03 UTC 2024 --- pull_requests.csv | 202 +++++++++++++++++++++++----------------------- 1 file changed, 101 insertions(+), 101 deletions(-) diff --git a/pull_requests.csv b/pull_requests.csv index 628e482..eaca97d 100644 --- a/pull_requests.csv +++ b/pull_requests.csv @@ -1,101 +1,101 @@ -PR Number,Title,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues -205,"Added data loading and cleaning Jupyter notebook","mru-hub","closed",,,"","","","" -201,"feat: City of Los Angeles Evictions #179","rahul897","open",,,"","","","" -199,"177 create 311 data csv files that can be accessed through a jupyter notebook","mru-hub","closed",,,"","","salice","" -197,"N2020h issues to csv","n2020h","open",,,"","n2020h","","" -196,"186 Adding Crime Data","dolla24","closed",,,"","dolla24","","" -195,"fix name of image to work with Windows","salice","closed",,,"","","","" -192,"Create issues-to-csv.yml","n2020h","closed",,,"","","","" -188,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","JessicaLucindaCheng","closed",,,"","","","5127, 2607, 6139, 296" -187,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","JessicaLucindaCheng","closed",,,"","","","" -186,"Lalla la crime analysis 2","Lalla22","open",,,"","","salice","" -185,"Create README.md","Lalla22","closed",,,"","","","" -184,"git c","Lalla22","closed",,,"","","","" -176,"build(deps): bump cryptography from 3.3.2 to 39.0.1 in /311-data/mapillarywrapper","dependabot[bot]","open",,,"dependencies","","","" -175,"build(deps): bump certifi from 2021.5.30 to 2022.12.7 in /LAANE","dependabot[bot]","open",,,"dependencies","","","" -174,"build(deps): bump certifi from 2020.6.20 to 2022.12.7 in /311-data/mapillarywrapper","dependabot[bot]","open",,,"dependencies","","","" -173,"build(deps): bump nbconvert from 5.6.1 to 6.5.1 in /311-data/mapillarywrapper","dependabot[bot]","open",,,"dependencies","","","172" -172,"build(deps): bump nbconvert from 5.6.1 to 6.3.0 in /311-data/mapillarywrapper","dependabot[bot]","closed",,,"dependencies","","","" -171,"build(deps): bump mistune from 0.8.4 to 2.0.3 in /311-data/mapillarywrapper","dependabot[bot]","open",,,"dependencies","","","" -168,"Updating label analysis jupyter notebook with co-occurrence analysis","rbianchetti","open",,,"","","","" -167,"Merge pull request #164 from hackforla/160-survey-repo-labels","codemamma","open",,,"","","","" -166,"build(deps): bump numpy from 1.21.2 to 1.22.0 in /LAANE","dependabot[bot]","open",,,"dependencies","","","" -165,"build(deps): bump numpy from 1.18.5 to 1.22.0 in /311-data/mapillarywrapper","dependabot[bot]","open",,,"dependencies","","","142" -164,"A python script 
that gets all the labels used in open issues across a…","rbianchetti","closed",,,"","","","26" -159,"Linking latest pedestrian safety report presentation","henrykaplan","open",,,"","","akhaleghi","" -158,"Data Science logo links and image files","henrykaplan","closed",,,"","","akhaleghi","140" -142,"build(deps): bump numpy from 1.18.5 to 1.21.0 in /311-data/mapillarywrapper","dependabot[bot]","closed",,,"dependencies","","","" -139,"Removed spaces from all filenames in pedestrian data directory","henrykaplan","closed",,,"","","","" -132,"Pedestrian safety","henrykaplan","closed",,,"","","","" -123,"Create a new issue template for epics","akhaleghi","closed",,,"","","","120" -122,"Pedestrian safety draft visualizations and presentation","henrykaplan","closed",,,"","","AlbertUlysses","" -117,"Pedestrian safety","henrykaplan","closed",,,"","","","" -116,"webscraping folder initial commit","rajindermavi","closed",,,"","","","" -115,"docs","AlbertUlysses","closed",,,"","","","" -114,"docs","AlbertUlysses","closed",,,"","","","" -113,"docs","AlbertUlysses","closed",,,"","","","" -112,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /LAANE","dependabot[bot]","closed",,,"dependencies","","","" -111,"Docs!","AlbertUlysses","closed",,,"","","","" -105,"debug","AlbertUlysses","closed",,,"","","","" -104,"docs/clean up","AlbertUlysses","closed",,,"","","","" -103,"feat: assessor script","AlbertUlysses","closed",,,"","","","" -102,"docs","AlbertUlysses","closed",,,"","","","" -101,"debug categorically inelligible","AlbertUlysses","closed",,,"","","","" -100,"New scripts + debug","AlbertUlysses","closed",,,"","","","" -99,"bugfix","AlbertUlysses","closed",,,"","","","" -98,"feat: new script for warning","AlbertUlysses","closed",,,"","","","" -97,"Two commits","AlbertUlysses","closed",,,"","","","" -96,"feat: new processesing script","AlbertUlysses","closed",,,"","","","" -95,"feat: airbnb dataset script","AlbertUlysses","closed",,,"","","","" -93,"debug one fine stay scripts","AlbertUlysses","closed",,,"","","","" -92,"debug","AlbertUlysses","closed",,,"","","","" -91,"debug","AlbertUlysses","closed",,,"","","","" -90,"fixbug in hsodenials","AlbertUlysses","closed",,,"","","","" -89,"debug","AlbertUlysses","closed",,,"","","","" -88,"bugfix: fixed exempt script","AlbertUlysses","closed",,,"","","","" -87,"fixbug: fixed bug in categorically inelligible","AlbertUlysses","closed",,,"","","","" -86,"bug fix: fixed bug for complaints data","AlbertUlysses","closed",,,"","","","" -85,"refactor/debug: debugging each script","AlbertUlysses","closed",,,"","","","" -84,"re-refactor normalize address","AlbertUlysses","closed",,,"","","","" -83,"refactored","AlbertUlysses","closed",,,"","","","" -82,"feat: hso_registrant script","AlbertUlysses","closed",,,"","","","" -81,"refactor: no code change files moved","AlbertUlysses","closed",,,"","","","" -79,"refactor: add a new file to handle multiple files","AlbertUlysses","closed",,,"","","","" -78,"feat: new script","AlbertUlysses","closed",,,"","","","" -77,"refactor: include steps for entire folder","AlbertUlysses","closed",,,"","","","" -76,"refactor: move scripts to new folder","AlbertUlysses","closed",,,"","","","" -75,"feat: platform one fine stay","AlbertUlysses","closed",,,"","","","" -74,"refactor: refactor platform luxly","AlbertUlysses","closed",,,"","","","" -73,"docs: update license","AlbertUlysses","closed",,,"","","","" -72,"feat: platform table luxly files","AlbertUlysses","closed",,,"","","","" -71,"feat: add hso revoked 
table","AlbertUlysses","closed",,,"","","","" -70,"feat: exempt processing script","AlbertUlysses","closed",,,"","","","" -69,"feat: noncompliant script","AlbertUlysses","closed",,,"","","","" -68,"feat: categorically inelligible","AlbertUlysses","closed",,,"","","","" -67,"feat: add complaints script","AlbertUlysses","closed",,,"","","","" -64,"feat: tot insert file","AlbertUlysses","closed",,,"","","","" -63,"feat: add new abstraction function","AlbertUlysses","closed",,,"","","","" -62,"quick fix","AlbertUlysses","closed",,,"","","","" -61,"feat: hso_denials insert function","AlbertUlysses","closed",,,"","","","" -60,"build(deps): bump jupyterlab from 2.1.5 to 2.2.10 in /311-data/mapillarywrapper","dependabot[bot]","closed",,,"dependencies","","","" -58,"docs: adding COPYING file","AlbertUlysses","closed",,,"","","","" -57,"Feat: add Airbnb tables","AlbertUlysses","closed",,,"","","","" -56,"feat: add SQL Alchemy models/Database","AlbertUlysses","closed",,,"","","","" -55,"docs: rewrote some docstrings etc","AlbertUlysses","closed",,,"","","","" -54,"feat: new code for date entries","AlbertUlysses","closed",,,"","","","" -53,"refactor: update assessor table","AlbertUlysses","closed",,,"","","","" -52,"docs: re-wrote some of the normalize_address information","AlbertUlysses","closed",,,"","","","" -51,"refactored transformation scripts","AlbertUlysses","closed",,,"","","","" -50,"tot update/completion","AlbertUlysses","closed",,,"","","","" -49,"work on tot table","AlbertUlysses","closed",,,"","","","" -48,"Add expemtion table transformations","AlbertUlysses","closed",,,"","","","" -47,"updating folder layout and adding transformation scripts with tests","AlbertUlysses","closed",,,"","","","" -43,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /311-data/mapillarywrapper","dependabot[bot]","closed",,,"dependencies","","","" -42,"Update Pedestrian Data folder","dplem","closed",,,"","","","" -41,"Push request redo preprocessing files","KarinaLopez19","closed",,,"","","AlbertUlysses","" -40,"Extend Mapillary API wrapper","henrykaplan","closed",,,"","henrykaplan","dplem,AlbertUlysses","" -39,"add bbox function","AlbertUlysses","closed",,,"","","","" -38,"this is a test","KarinaLopez19","closed",,,"","","","" -37,"add new folder with new functions to help with clean up","AlbertUlysses","closed",,,"","","","" -35,"clean up jupyter notebook","AlbertUlysses","closed",,,"","","","" -34,"chore: clean up modules","AlbertUlysses","closed",,,"","","","" +PR Number,Title,Author,State,Labels,Assignees,Reviewers,Linked Issues +205,"Added data loading and cleaning Jupyter notebook","mru-hub","closed","","","","" +201,"feat: City of Los Angeles Evictions #179","rahul897","open","","","","" +199,"177 create 311 data csv files that can be accessed through a jupyter notebook","mru-hub","closed","","","salice","" +197,"N2020h issues to csv","n2020h","open","","n2020h","","" +196,"186 Adding Crime Data","dolla24","closed","","dolla24","","" +195,"fix name of image to work with Windows","salice","closed","","","","" +192,"Create issues-to-csv.yml","n2020h","closed","","","","" +188,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","JessicaLucindaCheng","closed","","","","5127, 2607, 6139, 296" +187,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","JessicaLucindaCheng","closed","","","","" +186,"Lalla la crime analysis 2","Lalla22","open","","","salice","" +185,"Create README.md","Lalla22","closed","","","","" +184,"git c","Lalla22","closed","","","","" 
+176,"build(deps): bump cryptography from 3.3.2 to 39.0.1 in /311-data/mapillarywrapper","dependabot[bot]","open","dependencies","","","" +175,"build(deps): bump certifi from 2021.5.30 to 2022.12.7 in /LAANE","dependabot[bot]","open","dependencies","","","" +174,"build(deps): bump certifi from 2020.6.20 to 2022.12.7 in /311-data/mapillarywrapper","dependabot[bot]","open","dependencies","","","" +173,"build(deps): bump nbconvert from 5.6.1 to 6.5.1 in /311-data/mapillarywrapper","dependabot[bot]","open","dependencies","","","172" +172,"build(deps): bump nbconvert from 5.6.1 to 6.3.0 in /311-data/mapillarywrapper","dependabot[bot]","closed","dependencies","","","" +171,"build(deps): bump mistune from 0.8.4 to 2.0.3 in /311-data/mapillarywrapper","dependabot[bot]","open","dependencies","","","" +168,"Updating label analysis jupyter notebook with co-occurrence analysis","rbianchetti","open","","","","" +167,"Merge pull request #164 from hackforla/160-survey-repo-labels","codemamma","open","","","","" +166,"build(deps): bump numpy from 1.21.2 to 1.22.0 in /LAANE","dependabot[bot]","open","dependencies","","","" +165,"build(deps): bump numpy from 1.18.5 to 1.22.0 in /311-data/mapillarywrapper","dependabot[bot]","open","dependencies","","","142" +164,"A python script that gets all the labels used in open issues across a…","rbianchetti","closed","","","","26" +159,"Linking latest pedestrian safety report presentation","henrykaplan","open","","","akhaleghi","" +158,"Data Science logo links and image files","henrykaplan","closed","","","akhaleghi","140" +142,"build(deps): bump numpy from 1.18.5 to 1.21.0 in /311-data/mapillarywrapper","dependabot[bot]","closed","dependencies","","","" +139,"Removed spaces from all filenames in pedestrian data directory","henrykaplan","closed","","","","" +132,"Pedestrian safety","henrykaplan","closed","","","","" +123,"Create a new issue template for epics","akhaleghi","closed","","","","120" +122,"Pedestrian safety draft visualizations and presentation","henrykaplan","closed","","","AlbertUlysses","" +117,"Pedestrian safety","henrykaplan","closed","","","","" +116,"webscraping folder initial commit","rajindermavi","closed","","","","" +115,"docs","AlbertUlysses","closed","","","","" +114,"docs","AlbertUlysses","closed","","","","" +113,"docs","AlbertUlysses","closed","","","","" +112,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /LAANE","dependabot[bot]","closed","dependencies","","","" +111,"Docs!","AlbertUlysses","closed","","","","" +105,"debug","AlbertUlysses","closed","","","","" +104,"docs/clean up","AlbertUlysses","closed","","","","" +103,"feat: assessor script","AlbertUlysses","closed","","","","" +102,"docs","AlbertUlysses","closed","","","","" +101,"debug categorically inelligible","AlbertUlysses","closed","","","","" +100,"New scripts + debug","AlbertUlysses","closed","","","","" +99,"bugfix","AlbertUlysses","closed","","","","" +98,"feat: new script for warning","AlbertUlysses","closed","","","","" +97,"Two commits","AlbertUlysses","closed","","","","" +96,"feat: new processesing script","AlbertUlysses","closed","","","","" +95,"feat: airbnb dataset script","AlbertUlysses","closed","","","","" +93,"debug one fine stay scripts","AlbertUlysses","closed","","","","" +92,"debug","AlbertUlysses","closed","","","","" +91,"debug","AlbertUlysses","closed","","","","" +90,"fixbug in hsodenials","AlbertUlysses","closed","","","","" +89,"debug","AlbertUlysses","closed","","","","" +88,"bugfix: fixed exempt script","AlbertUlysses","closed","","","","" 
+87,"fixbug: fixed bug in categorically inelligible","AlbertUlysses","closed","","","","" +86,"bug fix: fixed bug for complaints data","AlbertUlysses","closed","","","","" +85,"refactor/debug: debugging each script","AlbertUlysses","closed","","","","" +84,"re-refactor normalize address","AlbertUlysses","closed","","","","" +83,"refactored","AlbertUlysses","closed","","","","" +82,"feat: hso_registrant script","AlbertUlysses","closed","","","","" +81,"refactor: no code change files moved","AlbertUlysses","closed","","","","" +79,"refactor: add a new file to handle multiple files","AlbertUlysses","closed","","","","" +78,"feat: new script","AlbertUlysses","closed","","","","" +77,"refactor: include steps for entire folder","AlbertUlysses","closed","","","","" +76,"refactor: move scripts to new folder","AlbertUlysses","closed","","","","" +75,"feat: platform one fine stay","AlbertUlysses","closed","","","","" +74,"refactor: refactor platform luxly","AlbertUlysses","closed","","","","" +73,"docs: update license","AlbertUlysses","closed","","","","" +72,"feat: platform table luxly files","AlbertUlysses","closed","","","","" +71,"feat: add hso revoked table","AlbertUlysses","closed","","","","" +70,"feat: exempt processing script","AlbertUlysses","closed","","","","" +69,"feat: noncompliant script","AlbertUlysses","closed","","","","" +68,"feat: categorically inelligible","AlbertUlysses","closed","","","","" +67,"feat: add complaints script","AlbertUlysses","closed","","","","" +64,"feat: tot insert file","AlbertUlysses","closed","","","","" +63,"feat: add new abstraction function","AlbertUlysses","closed","","","","" +62,"quick fix","AlbertUlysses","closed","","","","" +61,"feat: hso_denials insert function","AlbertUlysses","closed","","","","" +60,"build(deps): bump jupyterlab from 2.1.5 to 2.2.10 in /311-data/mapillarywrapper","dependabot[bot]","closed","dependencies","","","" +58,"docs: adding COPYING file","AlbertUlysses","closed","","","","" +57,"Feat: add Airbnb tables","AlbertUlysses","closed","","","","" +56,"feat: add SQL Alchemy models/Database","AlbertUlysses","closed","","","","" +55,"docs: rewrote some docstrings etc","AlbertUlysses","closed","","","","" +54,"feat: new code for date entries","AlbertUlysses","closed","","","","" +53,"refactor: update assessor table","AlbertUlysses","closed","","","","" +52,"docs: re-wrote some of the normalize_address information","AlbertUlysses","closed","","","","" +51,"refactored transformation scripts","AlbertUlysses","closed","","","","" +50,"tot update/completion","AlbertUlysses","closed","","","","" +49,"work on tot table","AlbertUlysses","closed","","","","" +48,"Add expemtion table transformations","AlbertUlysses","closed","","","","" +47,"updating folder layout and adding transformation scripts with tests","AlbertUlysses","closed","","","","" +43,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /311-data/mapillarywrapper","dependabot[bot]","closed","dependencies","","","" +42,"Update Pedestrian Data folder","dplem","closed","","","","" +41,"Push request redo preprocessing files","KarinaLopez19","closed","","","AlbertUlysses","" +40,"Extend Mapillary API wrapper","henrykaplan","closed","","henrykaplan","dplem,AlbertUlysses","" +39,"add bbox function","AlbertUlysses","closed","","","","" +38,"this is a test","KarinaLopez19","closed","","","","" +37,"add new folder with new functions to help with clean up","AlbertUlysses","closed","","","","" +35,"clean up jupyter notebook","AlbertUlysses","closed","","","","" +34,"chore: clean up 
modules","AlbertUlysses","closed","","","","" From e595741ce46179b56a861e4b2a1ba43f0a943fde Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Mon, 12 Aug 2024 17:30:25 -0400 Subject: [PATCH 45/49] Update pull_requests_to_csv.yml debug, read output --- .github/workflows/pull_requests_to_csv.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index bacab29..ae25204 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -45,6 +45,10 @@ jobs: -o "timeline_$pr_number.json"; \ done + # Debugging: Display timeline JSON for a PR + - name: Display timeline JSON content for a PR + run: cat timeline_*.json + # Generate pull requests CSV including linked issues - name: Generate pull requests CSV including linked issues run: | From 74af13910b4d4c762a15c9ee67ffcdbd3f84de0e Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Sun, 25 Aug 2024 22:50:01 -0400 Subject: [PATCH 46/49] Add more linked issues.yml --- .github/workflows/pull_requests_to_csv.yml | 65 ++++++++++++++++++---- 1 file changed, 53 insertions(+), 12 deletions(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index ae25204..14c7da8 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -49,34 +49,75 @@ jobs: - name: Display timeline JSON content for a PR run: cat timeline_*.json - # Generate pull requests CSV including linked issues - - name: Generate pull requests CSV including linked issues + + # # Generate pull requests CSV including linked issues + # - name: Generate pull requests CSV including linked issues + # run: | + # echo "PR Number,Title,Author,State,Labels,Assignees,Reviewers,Linked Issues" > pull_requests.csv + + # for pr_number in $(jq -r '.[].number' pulls.json); do + # timeline_file="timeline_$pr_number.json" + + # # Ensure the timeline file is not empty before processing + # if [ -s "$timeline_file" ]; then + # linked_issues=$(jq -r '[.[] | select(.event == "cross-referenced" and .source.issue) | .source.issue.number | tostring] | join(", ")' "$timeline_file") + # else + # linked_issues="" + # fi + + # jq -r --arg linked_issues "$linked_issues" \ + # '.[] | select(.number == '$pr_number') | [ + # .number, + # .title, + # .user.login, + # .state, + # (.labels | map(.name) | join(",")), + # (.assignees | map(.login) | join(",")), + # (.requested_reviewers | map(.login) | join(",")), + # $linked_issues + # ] | @csv' pulls.json >> pull_requests.csv + # done + + # Generate pull requests CSV including linked issues and has_issues field + - name: Generate pull requests CSV including linked issues and has_issues run: | - echo "PR Number,Title,Author,State,Labels,Assignees,Reviewers,Linked Issues" > pull_requests.csv + echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues,Has Issues" > pull_requests.csv for pr_number in $(jq -r '.[].number' pulls.json); do timeline_file="timeline_$pr_number.json" - # Ensure the timeline file is not empty before processing - if [ -s "$timeline_file" ]; then - linked_issues=$(jq -r '[.[] | select(.event == "cross-referenced" and .source.issue) | .source.issue.number | tostring] | join(", ")' "$timeline_file") - else - linked_issues="" - fi + # Extract linked issues from timeline, body, reactions, and 
issue_url + linked_issues_timeline=$(jq -r '[.[] | select(.event == "cross-referenced" and .source.issue) | .source.issue.url | capture("/(?\\d+)$").issue_number] | join(", ")' "$timeline_file") + + linked_issues_body=$(jq -r '.body | capture_all("#(?\\d+)"; "g") | join(",")' pulls.json) + + linked_issues_reactions=$(jq -r '.reactions.url | capture("/issues/(?\\d+)/reactions").issue_number' pulls.json) - jq -r --arg linked_issues "$linked_issues" \ + linked_issues_direct=$(jq -r '.issue_url | capture("/issues/(?\\d+)$").issue_number' pulls.json) + + # Combine all extracted issue numbers + linked_issues=$(echo "$linked_issues_timeline, $linked_issues_body, $linked_issues_reactions, $linked_issues_direct" | sed 's/^, //; s/, $//; s/,,/,/g') + + # Extract has_issues field + has_issues=$(jq -r '.repository.has_issues' pulls.json) + + jq -r --arg linked_issues "$linked_issues" --arg has_issues "$has_issues" \ '.[] | select(.number == '$pr_number') | [ .number, .title, + .body, .user.login, .state, + .commits, + .changed_files, (.labels | map(.name) | join(",")), (.assignees | map(.login) | join(",")), (.requested_reviewers | map(.login) | join(",")), - $linked_issues + $linked_issues, + $has_issues ] | @csv' pulls.json >> pull_requests.csv done - + # Check the content of pull_requests.csv for debugging - name: Display pull_requests.csv content run: cat pull_requests.csv From 06265c42434bf0cfd4b7bbeb7f5be21b974515cf Mon Sep 17 00:00:00 2001 From: Automated Date: Mon, 26 Aug 2024 02:50:17 +0000 Subject: [PATCH 47/49] Latest data: Mon Aug 26 02:50:17 UTC 2024 --- issues.csv | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/issues.csv b/issues.csv index b4441d7..da09118 100644 --- a/issues.csv +++ b/issues.csv @@ -1,8 +1,11 @@ Issue Number,Title,Labels,State,User Login,User ID,Assignee Login,Assignee ID +209,"[Job Match] Explore data, perform EDA and relevant data science work","","open","sudhara",7834544,"sudhara",7834544 +207,"LA Controller Data Projects","CoP: Data Science","open","salice",4333657,"FragariaChemist",103977933 +206,"LA Metro Real Time Transit Data","CoP: Data Science","open","salice",4333657,"tpham16",110870494 204,"Recruit volunteers for team open roles","role: missing,epic,ready for product,size: 0.25pt,feature: recruiting","open","akhaleghi",7635911,"None","None" 203,"Prep project boards for Migration","role: product","open","akhaleghi",7635911,"akhaleghi",7635911 202,"Information for New and Existing Members","CoP: Data Science","open","akhaleghi",7635911,"None","None" -200,"EPA Data Set","","open","akhaleghi",7635911,"salice",4333657 +200,"EPA Data Set","","open","akhaleghi",7635911,"KarinaLopez19",22568552 198,"Update the About us page on wiki","role: product,feature: onboarding,project duration: one time,ready for product,project: Data Science CoP Maintenance,size: 0.25pt","open","ExperimentsInHonesty",37763229,"max1million101",122141183 194,"Create data dictionary (EDA task)","feature: missing,role: missing,size: missing,project: missing","open","Lalla22",47159210,"None","None" 193,"DRAFT: Access to ""Third Spaces""","feature: missing,role: missing,size: missing,project: missing","open","akhaleghi",7635911,"None","None" @@ -15,16 +18,16 @@ Issue Number,Title,Labels,State,User Login,User ID,Assignee Login,Assignee ID 180,"City of Los Angeles Arrests","role: data science,epic,size: 3pt,project: EDA","open","akhaleghi",7635911,"dolla24",13824693 179,"CoP: Data Science: City of Los Angeles Evictions","role: data science,epic,size: 3pt,project: 
EDA","closed","akhaleghi",7635911,"rahul897",5197842 178,"CoP: Data Science: Find data sources through the LA Controller's office","documentation,good first issue,role: data analysis,size: 1pt,project: missing","open","akhaleghi",7635911,"RomyPatel",54936257 -177,"Create 311 data CSV files that can be accessed through a Jupyter notebook","role: data science,project: 311 Data,size: 3pt,feature: project management","closed","akhaleghi",7635911,"mru-hub",105573589 +177,"Create 311 data CSV files that can be accessed through a Jupyter notebook","role: data science,project: 311 Data,size: 3pt,feature: project management","open","akhaleghi",7635911,"mru-hub",105573589 170,"California Grants Portal Data Set EDA","role: data analysis,project duration: ongoing,size: 1pt,project: EDA","closed","akhaleghi",7635911,"jossus657",97187879 169,"Potential Projects: California Grants Portal","role: data science,epic,size: epic,dependency,project: EDA","closed","akhaleghi",7635911,"mihikasahani",102847564 -163,"CoP: Data Science: Complete Intro to Command Line and Git Tutorial","feature: guide,role: data science,size: 1pt,role: Data Engineer","open","akhaleghi",7635911,"None","None" -162,"CoP: Data Science: Complete Intro to Python Tutorial","feature: guide,role: data science,size: 1pt,draft,ready for product","open","akhaleghi",7635911,"None","None" +163,"CoP: Data Science: Complete Intro to Command Line and Git Tutorial","feature: guide,role: data science,size: 1pt,role: Data Engineer","open","akhaleghi",7635911,"ZeelDesai00",119442574 +162,"CoP: Data Science: Complete Intro to Python Tutorial","feature: guide,role: data science,size: 1pt,draft,ready for product","open","akhaleghi",7635911,"SaiPranaswi23",130614655 161,"Project Name: Lucky Parking","feature: missing,role: data analysis,size: 1pt,project: missing","closed","gregpawin",36276149,"None","None" 157,"CoP: Data Science: Create Deep Learning Tutorial","documentation,feature: guide,role: data science,size: 1pt","open","akhaleghi",7635911,"elliealbertson",120353163 156,"CoP: Data Science: Create Machine Learning Tutorial","documentation,feature: guide,role: data science,size: 1pt","open","akhaleghi",7635911,"BhavanaSai12",173202606 -155,"CoP: Data Science: Create Stats Tutorial","documentation,feature: guide,role: data science,role: data analysis,size: 1pt","open","akhaleghi",7635911,"None","None" -154,"CoP: Data Science: Create Data Ops Tutorial","documentation,feature: guide,size: 1pt,role: Data Engineer","open","akhaleghi",7635911,"None","None" +155,"CoP: Data Science: Create Stats Tutorial","documentation,feature: guide,role: data science,role: data analysis,size: 1pt","open","akhaleghi",7635911,"nehathombare21",163942514 +154,"CoP: Data Science: Create Data Ops Tutorial","documentation,feature: guide,size: 1pt,role: Data Engineer","open","akhaleghi",7635911,"endlesslupita",6325722 153,"CoP: Data Science: Create Text Analysis Tutorial","documentation,feature: guide,role: data analysis,size: 1pt","open","akhaleghi",7635911,"bfang22",111911687 152,"CoP: Data Science: Open Source Projects Data Set for Climate projects","role: data analysis,size: 1pt,epic: GreenEarthOS,project: climate projects,draft","open","akhaleghi",7635911,"noneill256",100643509 149,"Weekly Label Check","role: product,size: 1pt,feature: project management","open","ExperimentsInHonesty",37763229,"None","None" @@ -48,4 +51,3 @@ Issue Number,Title,Labels,State,User Login,User ID,Assignee Login,Assignee ID 121,"Template For Creating Epics For Data Science Projects","role: 
product,size: 1pt,feature: project management","closed","akhaleghi",7635911,"akhaleghi",7635911 120,"Structured Context for HfLA created data sets","feature: guide,role: data science,size: epic,project: structured context,TG: Draft Template,CoP: Data Science","open","ExperimentsInHonesty",37763229,"None","None" 118,"CoP: Data Science: Create district types reusable tool (API, single dataset, etc.)","feature: guide,role: data analysis,epic,size: epic,size: 1pt","open","ExperimentsInHonesty",37763229,"parcheesime",38143160 -94,"Data Science Competitive/Comparative Analysis","feature: guide,role: product,size: 2pt","closed","akhaleghi",7635911,"None","None" From 635d295c9c7fa08f55ba6ff1b6a36cd6d6b282f2 Mon Sep 17 00:00:00 2001 From: n2020h <72112832+n2020h@users.noreply.github.com> Date: Sun, 25 Aug 2024 22:55:54 -0400 Subject: [PATCH 48/49] Adding linked issues.yml --- .github/workflows/pull_requests_to_csv.yml | 77 +++++++++++----------- 1 file changed, 38 insertions(+), 39 deletions(-) diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml index 14c7da8..99e8554 100644 --- a/.github/workflows/pull_requests_to_csv.yml +++ b/.github/workflows/pull_requests_to_csv.yml @@ -77,46 +77,45 @@ jobs: # $linked_issues # ] | @csv' pulls.json >> pull_requests.csv # done - - # Generate pull requests CSV including linked issues and has_issues field - - name: Generate pull requests CSV including linked issues and has_issues - run: | - echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues,Has Issues" > pull_requests.csv - - for pr_number in $(jq -r '.[].number' pulls.json); do - timeline_file="timeline_$pr_number.json" - - # Extract linked issues from timeline, body, reactions, and issue_url - linked_issues_timeline=$(jq -r '[.[] | select(.event == "cross-referenced" and .source.issue) | .source.issue.url | capture("/(?\\d+)$").issue_number] | join(", ")' "$timeline_file") - - linked_issues_body=$(jq -r '.body | capture_all("#(?\\d+)"; "g") | join(",")' pulls.json) + - name: Generate pull requests CSV including linked issues and has_issues + run: | + echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues,Has Issues" > pull_requests.csv - linked_issues_reactions=$(jq -r '.reactions.url | capture("/issues/(?\\d+)/reactions").issue_number' pulls.json) - - linked_issues_direct=$(jq -r '.issue_url | capture("/issues/(?\\d+)$").issue_number' pulls.json) - - # Combine all extracted issue numbers - linked_issues=$(echo "$linked_issues_timeline, $linked_issues_body, $linked_issues_reactions, $linked_issues_direct" | sed 's/^, //; s/, $//; s/,,/,/g') - - # Extract has_issues field - has_issues=$(jq -r '.repository.has_issues' pulls.json) - - jq -r --arg linked_issues "$linked_issues" --arg has_issues "$has_issues" \ - '.[] | select(.number == '$pr_number') | [ - .number, - .title, - .body, - .user.login, - .state, - .commits, - .changed_files, - (.labels | map(.name) | join(",")), - (.assignees | map(.login) | join(",")), - (.requested_reviewers | map(.login) | join(",")), - $linked_issues, - $has_issues - ] | @csv' pulls.json >> pull_requests.csv - done + for pr_number in $(jq -r '.[].number' pulls.json); do + timeline_file="timeline_$pr_number.json" + + if [ -f "$timeline_file" ]; then + linked_issues_timeline=$(jq -r '[.[] | select(.event == "cross-referenced" and .source.issue) | .source.issue.url | 
capture("/(?\\d+)$").issue_number] | join(", ")' "$timeline_file") + else + linked_issues_timeline="" + fi + + linked_issues_body=$(jq -r '.[] | select(.number == '$pr_number') | .body | capture_all("#(?\\d+)"; "g") | join(",")' pulls.json || echo "") + + linked_issues_reactions=$(jq -r '.[] | select(.number == '$pr_number') | .reactions.url | capture("/issues/(?\\d+)/reactions").issue_number' pulls.json || echo "") + + linked_issues_direct=$(jq -r '.[] | select(.number == '$pr_number') | .issue_url | capture("/issues/(?\\d+)$").issue_number' pulls.json || echo "") + + linked_issues=$(echo "$linked_issues_timeline, $linked_issues_body, $linked_issues_reactions, $linked_issues_direct" | sed 's/^, //; s/, $//; s/,,/,/g') + + has_issues=$(jq -r '.repository.has_issues' pulls.json || echo "false") + + jq -r --arg linked_issues "$linked_issues" --arg has_issues "$has_issues" \ + '.[] | select(.number == '$pr_number') | [ + .number, + .title, + .body, + .user.login, + .state, + .commits, + .changed_files, + (.labels | map(.name) | join(",")), + (.assignees | map(.login) | join(",")), + (.requested_reviewers | map(.login) | join(",")), + $linked_issues, + $has_issues + ] | @csv' pulls.json >> pull_requests.csv + done # Check the content of pull_requests.csv for debugging - name: Display pull_requests.csv content From c5ffc24dc711d03349cf06c395e4a8bf3faa40b8 Mon Sep 17 00:00:00 2001 From: Automated Date: Mon, 26 Aug 2024 02:57:01 +0000 Subject: [PATCH 49/49] Latest pull requests data: Mon Aug 26 02:57:01 UTC 2024 --- pull_requests.csv | 1427 +++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 1326 insertions(+), 101 deletions(-) diff --git a/pull_requests.csv b/pull_requests.csv index eaca97d..7f4e5e3 100644 --- a/pull_requests.csv +++ b/pull_requests.csv @@ -1,101 +1,1326 @@ -PR Number,Title,Author,State,Labels,Assignees,Reviewers,Linked Issues -205,"Added data loading and cleaning Jupyter notebook","mru-hub","closed","","","","" -201,"feat: City of Los Angeles Evictions #179","rahul897","open","","","","" -199,"177 create 311 data csv files that can be accessed through a jupyter notebook","mru-hub","closed","","","salice","" -197,"N2020h issues to csv","n2020h","open","","n2020h","","" -196,"186 Adding Crime Data","dolla24","closed","","dolla24","","" -195,"fix name of image to work with Windows","salice","closed","","","","" -192,"Create issues-to-csv.yml","n2020h","closed","","","","" -188,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","JessicaLucindaCheng","closed","","","","5127, 2607, 6139, 296" -187,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","JessicaLucindaCheng","closed","","","","" -186,"Lalla la crime analysis 2","Lalla22","open","","","salice","" -185,"Create README.md","Lalla22","closed","","","","" -184,"git c","Lalla22","closed","","","","" -176,"build(deps): bump cryptography from 3.3.2 to 39.0.1 in /311-data/mapillarywrapper","dependabot[bot]","open","dependencies","","","" -175,"build(deps): bump certifi from 2021.5.30 to 2022.12.7 in /LAANE","dependabot[bot]","open","dependencies","","","" -174,"build(deps): bump certifi from 2020.6.20 to 2022.12.7 in /311-data/mapillarywrapper","dependabot[bot]","open","dependencies","","","" -173,"build(deps): bump nbconvert from 5.6.1 to 6.5.1 in /311-data/mapillarywrapper","dependabot[bot]","open","dependencies","","","172" -172,"build(deps): bump nbconvert from 5.6.1 to 6.3.0 in /311-data/mapillarywrapper","dependabot[bot]","closed","dependencies","","","" 
-171,"build(deps): bump mistune from 0.8.4 to 2.0.3 in /311-data/mapillarywrapper","dependabot[bot]","open","dependencies","","","" -168,"Updating label analysis jupyter notebook with co-occurrence analysis","rbianchetti","open","","","","" -167,"Merge pull request #164 from hackforla/160-survey-repo-labels","codemamma","open","","","","" -166,"build(deps): bump numpy from 1.21.2 to 1.22.0 in /LAANE","dependabot[bot]","open","dependencies","","","" -165,"build(deps): bump numpy from 1.18.5 to 1.22.0 in /311-data/mapillarywrapper","dependabot[bot]","open","dependencies","","","142" -164,"A python script that gets all the labels used in open issues across a…","rbianchetti","closed","","","","26" -159,"Linking latest pedestrian safety report presentation","henrykaplan","open","","","akhaleghi","" -158,"Data Science logo links and image files","henrykaplan","closed","","","akhaleghi","140" -142,"build(deps): bump numpy from 1.18.5 to 1.21.0 in /311-data/mapillarywrapper","dependabot[bot]","closed","dependencies","","","" -139,"Removed spaces from all filenames in pedestrian data directory","henrykaplan","closed","","","","" -132,"Pedestrian safety","henrykaplan","closed","","","","" -123,"Create a new issue template for epics","akhaleghi","closed","","","","120" -122,"Pedestrian safety draft visualizations and presentation","henrykaplan","closed","","","AlbertUlysses","" -117,"Pedestrian safety","henrykaplan","closed","","","","" -116,"webscraping folder initial commit","rajindermavi","closed","","","","" -115,"docs","AlbertUlysses","closed","","","","" -114,"docs","AlbertUlysses","closed","","","","" -113,"docs","AlbertUlysses","closed","","","","" -112,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /LAANE","dependabot[bot]","closed","dependencies","","","" -111,"Docs!","AlbertUlysses","closed","","","","" -105,"debug","AlbertUlysses","closed","","","","" -104,"docs/clean up","AlbertUlysses","closed","","","","" -103,"feat: assessor script","AlbertUlysses","closed","","","","" -102,"docs","AlbertUlysses","closed","","","","" -101,"debug categorically inelligible","AlbertUlysses","closed","","","","" -100,"New scripts + debug","AlbertUlysses","closed","","","","" -99,"bugfix","AlbertUlysses","closed","","","","" -98,"feat: new script for warning","AlbertUlysses","closed","","","","" -97,"Two commits","AlbertUlysses","closed","","","","" -96,"feat: new processesing script","AlbertUlysses","closed","","","","" -95,"feat: airbnb dataset script","AlbertUlysses","closed","","","","" -93,"debug one fine stay scripts","AlbertUlysses","closed","","","","" -92,"debug","AlbertUlysses","closed","","","","" -91,"debug","AlbertUlysses","closed","","","","" -90,"fixbug in hsodenials","AlbertUlysses","closed","","","","" -89,"debug","AlbertUlysses","closed","","","","" -88,"bugfix: fixed exempt script","AlbertUlysses","closed","","","","" -87,"fixbug: fixed bug in categorically inelligible","AlbertUlysses","closed","","","","" -86,"bug fix: fixed bug for complaints data","AlbertUlysses","closed","","","","" -85,"refactor/debug: debugging each script","AlbertUlysses","closed","","","","" -84,"re-refactor normalize address","AlbertUlysses","closed","","","","" -83,"refactored","AlbertUlysses","closed","","","","" -82,"feat: hso_registrant script","AlbertUlysses","closed","","","","" -81,"refactor: no code change files moved","AlbertUlysses","closed","","","","" -79,"refactor: add a new file to handle multiple files","AlbertUlysses","closed","","","","" -78,"feat: new 
script","AlbertUlysses","closed","","","","" -77,"refactor: include steps for entire folder","AlbertUlysses","closed","","","","" -76,"refactor: move scripts to new folder","AlbertUlysses","closed","","","","" -75,"feat: platform one fine stay","AlbertUlysses","closed","","","","" -74,"refactor: refactor platform luxly","AlbertUlysses","closed","","","","" -73,"docs: update license","AlbertUlysses","closed","","","","" -72,"feat: platform table luxly files","AlbertUlysses","closed","","","","" -71,"feat: add hso revoked table","AlbertUlysses","closed","","","","" -70,"feat: exempt processing script","AlbertUlysses","closed","","","","" -69,"feat: noncompliant script","AlbertUlysses","closed","","","","" -68,"feat: categorically inelligible","AlbertUlysses","closed","","","","" -67,"feat: add complaints script","AlbertUlysses","closed","","","","" -64,"feat: tot insert file","AlbertUlysses","closed","","","","" -63,"feat: add new abstraction function","AlbertUlysses","closed","","","","" -62,"quick fix","AlbertUlysses","closed","","","","" -61,"feat: hso_denials insert function","AlbertUlysses","closed","","","","" -60,"build(deps): bump jupyterlab from 2.1.5 to 2.2.10 in /311-data/mapillarywrapper","dependabot[bot]","closed","dependencies","","","" -58,"docs: adding COPYING file","AlbertUlysses","closed","","","","" -57,"Feat: add Airbnb tables","AlbertUlysses","closed","","","","" -56,"feat: add SQL Alchemy models/Database","AlbertUlysses","closed","","","","" -55,"docs: rewrote some docstrings etc","AlbertUlysses","closed","","","","" -54,"feat: new code for date entries","AlbertUlysses","closed","","","","" -53,"refactor: update assessor table","AlbertUlysses","closed","","","","" -52,"docs: re-wrote some of the normalize_address information","AlbertUlysses","closed","","","","" -51,"refactored transformation scripts","AlbertUlysses","closed","","","","" -50,"tot update/completion","AlbertUlysses","closed","","","","" -49,"work on tot table","AlbertUlysses","closed","","","","" -48,"Add expemtion table transformations","AlbertUlysses","closed","","","","" -47,"updating folder layout and adding transformation scripts with tests","AlbertUlysses","closed","","","","" -43,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /311-data/mapillarywrapper","dependabot[bot]","closed","dependencies","","","" -42,"Update Pedestrian Data folder","dplem","closed","","","","" -41,"Push request redo preprocessing files","KarinaLopez19","closed","","","AlbertUlysses","" -40,"Extend Mapillary API wrapper","henrykaplan","closed","","henrykaplan","dplem,AlbertUlysses","" -39,"add bbox function","AlbertUlysses","closed","","","","" -38,"this is a test","KarinaLopez19","closed","","","","" -37,"add new folder with new functions to help with clean up","AlbertUlysses","closed","","","","" -35,"clean up jupyter notebook","AlbertUlysses","closed","","","","" -34,"chore: clean up modules","AlbertUlysses","closed","","","","" +PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues,Has Issues +208,"Adding Cleaning script version3",,"mru-hub","open",,,"","","salice",", , 208","false" +205,"Added data loading and cleaning Jupyter notebook","- Initial implementation of data loading/ saving and cleaning. 
+- Improvements planned for future versions.","mru-hub","closed",,,"","","",", , 205","false" +201,"feat: City of Los Angeles Evictions #179","This commit introduces basic EDA on the LA evictions data, It introduces explode/splitting of Just Cause column for effective analysis + +starting #179 ","rahul897","open",,,"","","",", , 201","false" +199,"177 create 311 data csv files that can be accessed through a jupyter notebook","Cleaning rules document from the 311-data","mru-hub","closed",,,"","","salice",", , 199","false" +197,"N2020h issues to csv",,"n2020h","open",,,"","n2020h","",", , 197","false" +196,"186 Adding Crime Data",,"dolla24","closed",,,"","dolla24","",", , 196","false" +195,"fix name of image to work with Windows",,"salice","closed",,,"","","",", , 195","false" +192,"Create issues-to-csv.yml","testing feature branch with draft pull request","n2020h","closed",,,"","","",", , 192","false" +188,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Partially fixes https://github.com/hackforla/website/issues/6139 + +### What changes did you make? +- Adding Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo (commit: https://github.com/hackforla/access-the-data/commit/62609601277509b0473bbf8f6dbf1ef735430c84) to this repo + +### Why did you make the changes? +- Requested by Bonnie per [this comment](https://github.com/hackforla/website/issues/5127#issuecomment-1776306588) +- AJ Price, Access the Data's PM, didn't want Jupyter Notebook to show up as a Language for the project, so Bonnie recommended moving the Workshop-Feedback_Bigram_Analysis.ipynb file to the Data Science repo. + +### Additional Notes +- I recommend using rebase merging to preserve who authored the file. +- To see examples of the commit history of the various methods of merging, see these branches in my forked repo: + - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended + - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits + - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","","5127, 2607, 6139, 296, , , 188","false" +187,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Fixes N/A - + +### What changes did you make? +- Adding Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo (commit: https://github.com/hackforla/access-the-data/commit/62609601277509b0473bbf8f6dbf1ef735430c84) to this repo + +### Why did you make the changes? +- Requested by Bonnie per [this comment](https://github.com/hackforla/website/issues/5127#issuecomment-1776306588) +- AJ Price, Access the Data's PM, didn't want Jupyter Notebook to show up as a Language for the project, so Bonnie recommended moving the Workshop-Feedback_Bigram_Analysis.ipynb file to the Data Science repo. + +### Additional Notes +- I recommend using rebase merging to preserve who authored the file. 
+- To see examples of the commit history of the various methods of merging, see these branches in my forked repo: + - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended + - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits + - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","",", , 187","false" +186,"Lalla la crime analysis 2",,"Lalla22","open",,,"","","salice",", , 186","false" +185,"Create README.md",,"Lalla22","closed",,,"","","",", , 185","false" +184,"git c",,"Lalla22","closed",,,"","","",", , 184","false" +176,"build(deps): bump cryptography from 3.3.2 to 39.0.1 in /311-data/mapillarywrapper","Bumps [cryptography](https://github.com/pyca/cryptography) from 3.3.2 to 39.0.1. +
+Changelog +

Sourced from cryptography's changelog.

+
+

39.0.1 - 2023-02-07

+

+* **SECURITY ISSUE** - Fixed a bug where ``Cipher.update_into`` accepted Python
+  buffer protocol objects, but allowed immutable buffers. **CVE-2023-23931**
+* Updated Windows, macOS, and Linux wheels to be compiled with OpenSSL 3.0.8.
+

.. _v39-0-0:

+

39.0.0 - 2023-01-01 +

+
    +
  • BACKWARDS INCOMPATIBLE: Support for OpenSSL 1.1.0 has been removed. +Users on older version of OpenSSL will need to upgrade.
  • +
  • BACKWARDS INCOMPATIBLE: Dropped support for LibreSSL < 3.5. The new +minimum LibreSSL version is 3.5.0. Going forward our policy is to support +versions of LibreSSL that are available in versions of OpenBSD that are +still receiving security support.
  • +
  • BACKWARDS INCOMPATIBLE: Removed the encode_point and +from_encoded_point methods on +:class:~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicNumbers, +which had been deprecated for several years. +:meth:~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey.public_bytes +and +:meth:~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey.from_encoded_point +should be used instead.
  • +
  • BACKWARDS INCOMPATIBLE: Support for using MD5 or SHA1 in +:class:~cryptography.x509.CertificateBuilder, other X.509 builders, and +PKCS7 has been removed.
  • +
  • BACKWARDS INCOMPATIBLE: Dropped support for macOS 10.10 and 10.11, macOS +users must upgrade to 10.12 or newer.
  • +
  • ANNOUNCEMENT: The next version of cryptography (40.0) will change +the way we link OpenSSL. This will only impact users who build +cryptography from source (i.e., not from a wheel), and specify their +own version of OpenSSL. For those users, the CFLAGS, LDFLAGS, +INCLUDE, LIB, and CRYPTOGRAPHY_SUPPRESS_LINK_FLAGS environment +variables will no longer be respected. Instead, users will need to +configure their builds as documented here_.
  • +
  • Added support for +:ref:disabling the legacy provider in OpenSSL 3.0.x<legacy-provider>.
  • +
  • Added support for disabling RSA key validation checks when loading RSA +keys via +:func:~cryptography.hazmat.primitives.serialization.load_pem_private_key, +:func:~cryptography.hazmat.primitives.serialization.load_der_private_key, +and +:meth:~cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateNumbers.private_key. +This speeds up key loading but is :term:unsafe if you are loading potentially +attacker supplied keys.
  • +
  • Significantly improved performance for +:class:~cryptography.hazmat.primitives.ciphers.aead.ChaCha20Poly1305
  • +
+ +
+

... (truncated)

+
+
+Commits + +
+
+ + +[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cryptography&package-manager=pip&previous-version=3.3.2&new-version=39.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) + +Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. + +[//]: # (dependabot-automerge-start) +[//]: # (dependabot-automerge-end) + +--- + +
+Dependabot commands and options +
+ +You can trigger Dependabot actions by commenting on this PR: +- `@dependabot rebase` will rebase this PR +- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it +- `@dependabot merge` will merge this PR after your CI passes on it +- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it +- `@dependabot cancel merge` will cancel a previously requested merge and block automerging +- `@dependabot reopen` will reopen this PR if it is closed +- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually +- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) +- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language +- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language +- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language +- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language + +You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). + +
","dependabot[bot]","open",,,"dependencies","","",", , 176","false" +175,"build(deps): bump certifi from 2021.5.30 to 2022.12.7 in /LAANE","Bumps [certifi](https://github.com/certifi/python-certifi) from 2021.5.30 to 2022.12.7. +
+Commits + +
+
+ + +[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=certifi&package-manager=pip&previous-version=2021.5.30&new-version=2022.12.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) + +Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. + +[//]: # (dependabot-automerge-start) +[//]: # (dependabot-automerge-end) + +--- + +
+Dependabot commands and options +
+ +You can trigger Dependabot actions by commenting on this PR: +- `@dependabot rebase` will rebase this PR +- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it +- `@dependabot merge` will merge this PR after your CI passes on it +- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it +- `@dependabot cancel merge` will cancel a previously requested merge and block automerging +- `@dependabot reopen` will reopen this PR if it is closed +- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually +- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) +- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language +- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language +- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language +- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language + +You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). + +
","dependabot[bot]","open",,,"dependencies","","",", , 175","false" +174,"build(deps): bump certifi from 2020.6.20 to 2022.12.7 in /311-data/mapillarywrapper","Bumps [certifi](https://github.com/certifi/python-certifi) from 2020.6.20 to 2022.12.7. +
+Commits + +
+
+ + +[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=certifi&package-manager=pip&previous-version=2020.6.20&new-version=2022.12.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) + +Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. + +[//]: # (dependabot-automerge-start) +[//]: # (dependabot-automerge-end) + +--- + +
+Dependabot commands and options +
+ +You can trigger Dependabot actions by commenting on this PR: +- `@dependabot rebase` will rebase this PR +- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it +- `@dependabot merge` will merge this PR after your CI passes on it +- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it +- `@dependabot cancel merge` will cancel a previously requested merge and block automerging +- `@dependabot reopen` will reopen this PR if it is closed +- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually +- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) +- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language +- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language +- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language +- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language + +You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). + +
","dependabot[bot]","open",,,"dependencies","","",", , 174","false" +173,"build(deps): bump nbconvert from 5.6.1 to 6.5.1 in /311-data/mapillarywrapper","Bumps [nbconvert](https://github.com/jupyter/nbconvert) from 5.6.1 to 6.5.1. +
+Release notes +

Sourced from nbconvert's releases.

+
+

Release 6.5.1

+

No release notes provided.

+

6.5.0

+

What's Changed

+ +

New Contributors

+ +

Full Changelog: https://github.com/jupyter/nbconvert/compare/6.4.5...6.5

+

6.4.3

+

What's Changed

+ +

New Contributors

+ +

Full Changelog: https://github.com/jupyter/nbconvert/compare/6.4.2...6.4.3

+

6.4.0

+

What's Changed

+ +

New Contributors

+ +
+

... (truncated)

+
+
+Commits + +
+
+ + +[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=nbconvert&package-manager=pip&previous-version=5.6.1&new-version=6.5.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) + +Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. + +[//]: # (dependabot-automerge-start) +[//]: # (dependabot-automerge-end) + +--- + +
+Dependabot commands and options +
+ +You can trigger Dependabot actions by commenting on this PR: +- `@dependabot rebase` will rebase this PR +- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it +- `@dependabot merge` will merge this PR after your CI passes on it +- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it +- `@dependabot cancel merge` will cancel a previously requested merge and block automerging +- `@dependabot reopen` will reopen this PR if it is closed +- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually +- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) +- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language +- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language +- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language +- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language + +You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). + +
","dependabot[bot]","open",,,"dependencies","","","172, , , 173","false" +172,"build(deps): bump nbconvert from 5.6.1 to 6.3.0 in /311-data/mapillarywrapper","Bumps [nbconvert](https://github.com/jupyter/nbconvert) from 5.6.1 to 6.3.0. +
+Commits + +
+
+ + +[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=nbconvert&package-manager=pip&previous-version=5.6.1&new-version=6.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) + +Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. + +[//]: # (dependabot-automerge-start) +[//]: # (dependabot-automerge-end) + +--- + +
+Dependabot commands and options +
+ +You can trigger Dependabot actions by commenting on this PR: +- `@dependabot rebase` will rebase this PR +- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it +- `@dependabot merge` will merge this PR after your CI passes on it +- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it +- `@dependabot cancel merge` will cancel a previously requested merge and block automerging +- `@dependabot reopen` will reopen this PR if it is closed +- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually +- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) +- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language +- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language +- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language +- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language + +You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). + +
","dependabot[bot]","closed",,,"dependencies","","",", , 172","false" +171,"build(deps): bump mistune from 0.8.4 to 2.0.3 in /311-data/mapillarywrapper","Bumps [mistune](https://github.com/lepture/mistune) from 0.8.4 to 2.0.3. +
+Release notes +

Sourced from mistune's releases.

+
+

Version 2.0.2

+

Fix escape_url via lepture/mistune#295

+

Version 2.0.1

+

Fix XSS for image link syntax.

+

Version 2.0.0

+

First release of Mistune v2.

+

Version 2.0.0 RC1

+

In this release, we have a Security Fix for harmful links.

+

Version 2.0.0 Alpha 1

+

This is the first release of v2. An alpha version for users to have a preview of the new mistune.

+
+
+
+Changelog +

Sourced from mistune's changelog.

+
+

Changelog

+

Here is the full history of mistune v2.

+

Version 2.0.4

+

+Released on Jul 15, 2022
+
    +
  • Fix url plugin in &lt;a&gt; tag
  • +
  • Fix * formatting
  • +
+

Version 2.0.3 +

+

Released on Jun 27, 2022

+
    +
  • Fix table plugin
  • +
  • Security fix for CVE-2022-34749
  • +
+

Version 2.0.2

+

+Released on Jan 14, 2022
+

Fix escape_url

+

Version 2.0.1 +

+

Released on Dec 30, 2021

+

XSS fix for image link syntax.

+

Version 2.0.0

+

+Released on Dec 5, 2021
+

This is the first non-alpha release of mistune v2.

+

Version 2.0.0rc1 +

+

Released on Feb 16, 2021

+

Version 2.0.0a6

+

+</tr></table> 
+
+
+

... (truncated)

+
+
+Commits +
    +
  • 3f422f1 Version bump 2.0.3
  • +
  • a6d4321 Fix asteris emphasis regex CVE-2022-34749
  • +
  • 5638e46 Merge pull request #307 from jieter/patch-1
  • +
  • 0eba471 Fix typo in guide.rst
  • +
  • 61e9337 Fix table plugin
  • +
  • 76dec68 Add documentation for renderer heading when TOC enabled
  • +
  • 799cd11 Version bump 2.0.2
  • +
  • babb0cf Merge pull request #295 from dairiki/bug.escape_url
  • +
  • fc2cd53 Make mistune.util.escape_url less aggressive
  • +
  • 3e8d352 Version bump 2.0.1
  • +
  • Additional commits viewable in compare view
  • +
+
+
+ + +[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mistune&package-manager=pip&previous-version=0.8.4&new-version=2.0.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) + +Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. + +[//]: # (dependabot-automerge-start) +[//]: # (dependabot-automerge-end) + +--- + +
+Dependabot commands and options +
+ +You can trigger Dependabot actions by commenting on this PR: +- `@dependabot rebase` will rebase this PR +- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it +- `@dependabot merge` will merge this PR after your CI passes on it +- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it +- `@dependabot cancel merge` will cancel a previously requested merge and block automerging +- `@dependabot reopen` will reopen this PR if it is closed +- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually +- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) +- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language +- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language +- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language +- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language + +You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). + +
","dependabot[bot]","open",,,"dependencies","","",", , 171","false" +168,"Updating label analysis jupyter notebook with co-occurrence analysis",,"rbianchetti","open",,,"","","",", , 168","false" +167,"Merge pull request #164 from hackforla/160-survey-repo-labels","A python script that gets all the labels used in open issues across a…","codemamma","open",,,"","","",", , 167","false" +166,"build(deps): bump numpy from 1.21.2 to 1.22.0 in /LAANE","Bumps [numpy](https://github.com/numpy/numpy) from 1.21.2 to 1.22.0. +
+Release notes +

Sourced from numpy's releases.

+
+

v1.22.0

+

NumPy 1.22.0 Release Notes

+

NumPy 1.22.0 is a big release featuring the work of 153 contributors +spread over 609 pull requests. There have been many improvements, +highlights are:

+
    +
  • Annotations of the main namespace are essentially complete. Upstream +is a moving target, so there will likely be further improvements, +but the major work is done. This is probably the most user visible +enhancement in this release.
  • +
  • A preliminary version of the proposed Array-API is provided. This is +a step in creating a standard collection of functions that can be +used across application such as CuPy and JAX.
  • +
  • NumPy now has a DLPack backend. DLPack provides a common interchange +format for array (tensor) data.
  • +
  • New methods for quantile, percentile, and related functions. The +new methods provide a complete set of the methods commonly found in +the literature.
  • +
  • A new configurable allocator for use by downstream projects.
  • +
+

These are in addition to the ongoing work to provide SIMD support for +commonly used functions, improvements to F2PY, and better documentation.

+

The Python versions supported in this release are 3.8-3.10, Python 3.7 +has been dropped. Note that 32 bit wheels are only provided for Python +3.8 and 3.9 on Windows, all other wheels are 64 bits on account of +Ubuntu, Fedora, and other Linux distributions dropping 32 bit support. +All 64 bit wheels are also linked with 64 bit integer OpenBLAS, which should fix +the occasional problems encountered by folks using truly huge arrays.

+

Expired deprecations

+

Deprecated numeric style dtype strings have been removed

+

Using the strings "Bytes0", "Datetime64", "Str0", "Uint32", +and "Uint64" as a dtype will now raise a TypeError.

+

(gh-19539)

+

Expired deprecations for loads, ndfromtxt, and mafromtxt in npyio

+

numpy.loads was deprecated in v1.15, with the recommendation that +users use pickle.loads instead. ndfromtxt and mafromtxt were both +deprecated in v1.17 - users should use numpy.genfromtxt instead with +the appropriate value for the usemask parameter.

+

(gh-19615)

+ +
+

... (truncated)

+
+
+Commits + +
+
+ + +[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=numpy&package-manager=pip&previous-version=1.21.2&new-version=1.22.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) + +Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. + +[//]: # (dependabot-automerge-start) +[//]: # (dependabot-automerge-end) + +--- + +
+Dependabot commands and options +
+ +You can trigger Dependabot actions by commenting on this PR: +- `@dependabot rebase` will rebase this PR +- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it +- `@dependabot merge` will merge this PR after your CI passes on it +- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it +- `@dependabot cancel merge` will cancel a previously requested merge and block automerging +- `@dependabot reopen` will reopen this PR if it is closed +- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually +- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) +- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language +- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language +- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language +- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language + +You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). + +
","dependabot[bot]","open",,,"dependencies","","",", , 166","false" +165,"build(deps): bump numpy from 1.18.5 to 1.22.0 in /311-data/mapillarywrapper","Bumps [numpy](https://github.com/numpy/numpy) from 1.18.5 to 1.22.0. +
+Release notes +

Sourced from numpy's releases.

+
+

v1.22.0

+

NumPy 1.22.0 Release Notes

+

NumPy 1.22.0 is a big release featuring the work of 153 contributors +spread over 609 pull requests. There have been many improvements, +highlights are:

+
    +
  • Annotations of the main namespace are essentially complete. Upstream +is a moving target, so there will likely be further improvements, +but the major work is done. This is probably the most user visible +enhancement in this release.
  • +
  • A preliminary version of the proposed Array-API is provided. This is +a step in creating a standard collection of functions that can be +used across application such as CuPy and JAX.
  • +
  • NumPy now has a DLPack backend. DLPack provides a common interchange +format for array (tensor) data.
  • +
  • New methods for quantile, percentile, and related functions. The +new methods provide a complete set of the methods commonly found in +the literature.
  • +
  • A new configurable allocator for use by downstream projects.
  • +
+

These are in addition to the ongoing work to provide SIMD support for +commonly used functions, improvements to F2PY, and better documentation.

+

The Python versions supported in this release are 3.8-3.10, Python 3.7 +has been dropped. Note that 32 bit wheels are only provided for Python +3.8 and 3.9 on Windows, all other wheels are 64 bits on account of +Ubuntu, Fedora, and other Linux distributions dropping 32 bit support. +All 64 bit wheels are also linked with 64 bit integer OpenBLAS, which should fix +the occasional problems encountered by folks using truly huge arrays.

+

Expired deprecations

+

Deprecated numeric style dtype strings have been removed

+

Using the strings "Bytes0", "Datetime64", "Str0", "Uint32", +and "Uint64" as a dtype will now raise a TypeError.

+

(gh-19539)

+

Expired deprecations for loads, ndfromtxt, and mafromtxt in npyio

+

numpy.loads was deprecated in v1.15, with the recommendation that +users use pickle.loads instead. ndfromtxt and mafromtxt were both +deprecated in v1.17 - users should use numpy.genfromtxt instead with +the appropriate value for the usemask parameter.

+

(gh-19615)

+ +
+

... (truncated)

+
+
+Commits + +
+
+ + +[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=numpy&package-manager=pip&previous-version=1.18.5&new-version=1.22.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) + +Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. + +[//]: # (dependabot-automerge-start) +[//]: # (dependabot-automerge-end) + +--- + +
+Dependabot commands and options +
+ +You can trigger Dependabot actions by commenting on this PR: +- `@dependabot rebase` will rebase this PR +- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it +- `@dependabot merge` will merge this PR after your CI passes on it +- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it +- `@dependabot cancel merge` will cancel a previously requested merge and block automerging +- `@dependabot reopen` will reopen this PR if it is closed +- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually +- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) +- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language +- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language +- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language +- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language + +You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). + +
","dependabot[bot]","open",,,"dependencies","","","142, , , 165","false" +164,"A python script that gets all the labels used in open issues across a…","…n organization(s) via GitHub API","rbianchetti","closed",,,"","","","26, , , 164","false" +159,"Linking latest pedestrian safety report presentation","This replaces a previous powerpoint presentation stored in the repo with links to the current report and presentation saved in Hack for LA's shared google drive.","henrykaplan","open",,,"","","akhaleghi",", , 159","false" +158,"Data Science logo links and image files","The latest version of the Hack for LA data science logo is being worked on in Google Drive. I've updated the directory in the git repo with links to the latest and with png and svg versions of the latest logo files.","henrykaplan","closed",,,"","","akhaleghi","140, , , 158","false" +142,"build(deps): bump numpy from 1.18.5 to 1.21.0 in /311-data/mapillarywrapper","Bumps [numpy](https://github.com/numpy/numpy) from 1.18.5 to 1.21.0. +
+Release notes +

Sourced from numpy's releases.

+
+

v1.21.0

+

NumPy 1.21.0 Release Notes

+

The NumPy 1.21.0 release highlights are

+
    +
  • continued SIMD work covering more functions and platforms,
  • +
  • initial work on the new dtype infrastructure and casting,
  • +
  • universal2 wheels for Python 3.8 and Python 3.9 on Mac,
  • +
  • improved documentation,
  • +
  • improved annotations,
  • +
  • new PCG64DXSM bitgenerator for random numbers.
  • +
+

In addition there are the usual large number of bug fixes and other +improvements.

+

The Python versions supported for this release are 3.7-3.9. Official +support for Python 3.10 will be added when it is released.

+

:warning: Warning: there are unresolved problems compiling NumPy 1.21.0 with gcc-11.1 .

+
    +
  • Optimization level -O3 results in many wrong warnings when running the tests.
  • +
  • On some hardware NumPy will hang in an infinite loop.
  • +
+

New functions

+

Add PCG64DXSM BitGenerator

+

Uses of the PCG64 BitGenerator in a massively-parallel context have +been shown to have statistical weaknesses that were not apparent at the +first release in numpy 1.17. Most users will never observe this weakness +and are safe to continue to use PCG64. We have introduced a new +PCG64DXSM BitGenerator that will eventually become the new default +BitGenerator implementation used by default_rng in future releases. +PCG64DXSM solves the statistical weakness while preserving the +performance and the features of PCG64.

+

See upgrading-pcg64 for more details.

+

(gh-18906)

+

Expired deprecations

+
    +
  • The shape argument numpy.unravel_index cannot be +passed as dims keyword argument anymore. (Was deprecated in NumPy +1.16.)
  • +
+ +
+

... (truncated)

+
+
+Commits +
    +
  • b235f9e Merge pull request #19283 from charris/prepare-1.21.0-release
  • +
  • 34aebc2 MAINT: Update 1.21.0-notes.rst
  • +
  • 493b64b MAINT: Update 1.21.0-changelog.rst
  • +
  • 07d7e72 MAINT: Remove accidentally created directory.
  • +
  • 032fca5 Merge pull request #19280 from charris/backport-19277
  • +
  • 7d25b81 BUG: Fix refcount leak in ResultType
  • +
  • fa5754e BUG: Add missing DECREF in new path
  • +
  • 61127bb Merge pull request #19268 from charris/backport-19264
  • +
  • 143d45f Merge pull request #19269 from charris/backport-19228
  • +
  • d80e473 BUG: Removed typing for == and != in dtypes
  • +
  • Additional commits viewable in compare view
  • +
+
+
+ + +[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=numpy&package-manager=pip&previous-version=1.18.5&new-version=1.21.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) + +Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. + +[//]: # (dependabot-automerge-start) +[//]: # (dependabot-automerge-end) + +--- + +
+Dependabot commands and options +
+ +You can trigger Dependabot actions by commenting on this PR: +- `@dependabot rebase` will rebase this PR +- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it +- `@dependabot merge` will merge this PR after your CI passes on it +- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it +- `@dependabot cancel merge` will cancel a previously requested merge and block automerging +- `@dependabot reopen` will reopen this PR if it is closed +- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually +- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) +- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language +- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language +- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language +- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language + +You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). + +
","dependabot[bot]","closed",,,"dependencies","","",", , 142","false" +139,"Removed spaces from all filenames in pedestrian data directory","Changed filenames of all output files and graphics. No changes to file contents — these are final graphics and reports, and aren't aren't referenced by file path anywhere.","henrykaplan","closed",,,"","","",", , 139","false" +132,"Pedestrian safety","Added readme documentation for downloading and organizing data, running Jupyter notebooks, and reorganized analysis files.","henrykaplan","closed",,,"","","",", , 132","false" +123,"Create a new issue template for epics",,"akhaleghi","closed",,,"","","","120, , , 123","false" +122,"Pedestrian safety draft visualizations and presentation","Further simplified and clarified maps and traffic accident charts, added these new charts and related discussion to the presentation file.","henrykaplan","closed",,,"","","AlbertUlysses",", , 122","false" +117,"Pedestrian safety","- Add optional parameter to mapillary wrapper client to allow data requests of recent data only, to quickly update existing datasets. +- Better maps and visualizations. +- Use DBScan clustering algorithm to find clusters of traffic accidents (simplest way to roughly group accidents at specific intersections, as the accident data is mostly based around intersections). +- Updated report ppt for August 5 presentation to data science group","henrykaplan","closed",,,"","","",", , 117","false" +116,"webscraping folder initial commit","[Edited] + +This PR is a stand alone folder added to the 311-data folder. It contains a webscraping script for the tech stacks of each NC survey. See issue [44](https://github.com/hackforla/data-science/issues/44). The readme contains a link to the google sheets table of the tech used for each website.","rajindermavi","closed",,,"","","",", , 116","false" +115,"docs","updated readme to include sample queries that LAANE is interested in doing against the database","AlbertUlysses","closed",,,"","","",", , 115","false" +114,"docs","Update README/todos","AlbertUlysses","closed",,,"","","",", , 114","false" +113,"docs","Updated misc docs","AlbertUlysses","closed",,,"","","",", , 113","false" +112,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /LAANE","Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.24.3 to 1.26.5. +
+Release notes +

Sourced from urllib3's releases.

+
+

1.26.5

+

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

+
    +
  • Fixed deprecation warnings emitted in Python 3.10.
  • +
  • Updated vendored six library to 1.16.0.
  • +
  • Improved performance of URL parser when splitting the authority component.
  • +
+

If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors

+

1.26.4

+

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

+
    +
  • Changed behavior of the default SSLContext when connecting to HTTPS proxy during HTTPS requests. The default SSLContext now sets check_hostname=True.
  • +
+

If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors

+

1.26.3

+

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

+
    +
  • +

    Fixed bytes and string comparison issue with headers (Pull #2141)

    +
  • +
  • +

    Changed ProxySchemeUnknown error message to be more actionable if the user supplies a proxy URL without a scheme (Pull #2107)

    +
  • +
+

If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors

+

1.26.2

+

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

+
    +
  • Fixed an issue where wrap_socket and CERT_REQUIRED wouldn't be imported properly on Python 2.7.8 and earlier (Pull #2052)
  • +
+

1.26.1

+

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

+
    +
  • Fixed an issue where two User-Agent headers would be sent if a User-Agent header key is passed as bytes (Pull #2047)
  • +
+

1.26.0

+

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

+
    +
  • +

    Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)

    +
  • +
  • +

    Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that +still wish to use TLS earlier than 1.2 without a deprecation warning +should opt-in explicitly by setting ssl_version=ssl.PROTOCOL_TLSv1_1 (Pull #2002) +Starting in urllib3 v2.0: Connections that receive a DeprecationWarning will fail

    +
  • +
  • +

    Deprecated Retry options Retry.DEFAULT_METHOD_WHITELIST, Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST +and Retry(method_whitelist=...) in favor of Retry.DEFAULT_ALLOWED_METHODS, +Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT, and Retry(allowed_methods=...) +(Pull #2000) Starting in urllib3 v2.0: Deprecated options will be removed

    +
  • +
+ +
+

... (truncated)

+
+
+Changelog +

Sourced from urllib3's changelog.

+
+

1.26.5 (2021-05-26)

+
    +
  • Fixed deprecation warnings emitted in Python 3.10.
  • +
  • Updated vendored six library to 1.16.0.
  • +
  • Improved performance of URL parser when splitting +the authority component.
  • +
+

1.26.4 (2021-03-15)

+
    +
  • Changed behavior of the default SSLContext when connecting to HTTPS proxy +during HTTPS requests. The default SSLContext now sets check_hostname=True.
  • +
+

1.26.3 (2021-01-26)

+
    +
  • +

    Fixed bytes and string comparison issue with headers (Pull #2141)

    +
  • +
  • +

    Changed ProxySchemeUnknown error message to be +more actionable if the user supplies a proxy URL without +a scheme. (Pull #2107)

    +
  • +
+

1.26.2 (2020-11-12)

+
    +
  • Fixed an issue where wrap_socket and CERT_REQUIRED wouldn't +be imported properly on Python 2.7.8 and earlier (Pull #2052)
  • +
+

1.26.1 (2020-11-11)

+
    +
  • Fixed an issue where two User-Agent headers would be sent if a +User-Agent header key is passed as bytes (Pull #2047)
  • +
+

1.26.0 (2020-11-10)

+
    +
  • +

    NOTE: urllib3 v2.0 will drop support for Python 2. +Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>_.

    +
  • +
  • +

    Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)

    +
  • +
  • +

    Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that +still wish to use TLS earlier than 1.2 without a deprecation warning

    +
  • +
+ +
+

... (truncated)

+
+
+Commits +
    +
  • d161647 Release 1.26.5
  • +
  • 2d4a3fe Improve performance of sub-authority splitting in URL
  • +
  • 2698537 Update vendored six to 1.16.0
  • +
  • 07bed79 Fix deprecation warnings for Python 3.10 ssl module
  • +
  • d725a9b Add Python 3.10 to GitHub Actions
  • +
  • 339ad34 Use pytest==6.2.4 on Python 3.10+
  • +
  • f271c9c Apply latest Black formatting
  • +
  • 1884878 [1.26] Properly proxy EOF on the SSLTransport test suite
  • +
  • a891304 Release 1.26.4
  • +
  • 8d65ea1 Merge pull request from GHSA-5phf-pp7p-vc2r
  • +
  • Additional commits viewable in compare view
  • +
+
+
+ + +[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=urllib3&package-manager=pip&previous-version=1.24.3&new-version=1.26.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) + +Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. + +[//]: # (dependabot-automerge-start) +[//]: # (dependabot-automerge-end) + +--- + +
+Dependabot commands and options +
+ +You can trigger Dependabot actions by commenting on this PR: +- `@dependabot rebase` will rebase this PR +- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it +- `@dependabot merge` will merge this PR after your CI passes on it +- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it +- `@dependabot cancel merge` will cancel a previously requested merge and block automerging +- `@dependabot reopen` will reopen this PR if it is closed +- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually +- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) +- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language +- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language +- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language +- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language + +You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). + +
","dependabot[bot]","closed",,,"dependencies","","",", , 112","false" +111,"Docs!","Updated docs like readme and add extra notes to some other files","AlbertUlysses","closed",,,"","","",", , 111","false" +105,"debug","debugged some code that was entering incorrect addresses","AlbertUlysses","closed",,,"","","",", , 105","false" +104,"docs/clean up","updated README and removed some old files.","AlbertUlysses","closed",,,"","","",", , 104","false" +103,"feat: assessor script","Last script, passes tests refactored a bit","AlbertUlysses","closed",,,"","","",", , 103","false" +102,"docs","Update Readme +removed a lot of personal notes aimed for me during script writing","AlbertUlysses","closed",,,"","","",", , 102","false" +101,"debug categorically inelligible","debug categorically inelligibl data","AlbertUlysses","closed",,,"","","",", , 101","false" +100,"New scripts + debug","added some code that debugs the ofs and luxly datasets. +Added a new script that handles warnings and citations.","AlbertUlysses","closed",,,"","","",", , 100","false" +99,"bugfix","fixed bugs across 4 files that dealth with addresses that returned 0 for zipcode instead of a 5 number digit","AlbertUlysses","closed",,,"","","",", , 99","false" +98,"feat: new script for warning","This script is for warning files that don't have only addresses and dates.","AlbertUlysses","closed",,,"","","",", , 98","false" +97,"Two commits","First commit fixes the warning script file. +The second commit adds an extra line describe what the script is for. +","AlbertUlysses","closed",,,"","","",", , 97","false" +96,"feat: new processesing script","script for airbnb reviews is complete.","AlbertUlysses","closed",,,"","","",", , 96","false" +95,"feat: airbnb dataset script","airbnb script that uploads listings and host information ","AlbertUlysses","closed",,,"","","",", , 95","false" +93,"debug one fine stay scripts","debug one fine stay scripts for address2 errors","AlbertUlysses","closed",,,"","","",", , 93","false" +92,"debug","debugged the data from one fine stay.","AlbertUlysses","closed",,,"","","",", , 92","false" +91,"debug","Did some debugging for luxly platform script","AlbertUlysses","closed",,,"","","",", , 91","false" +90,"fixbug in hsodenials","hsodenials bugs are remved, hsodenials script is good to go.","AlbertUlysses","closed",,,"","","",", , 90","false" +89,"debug","Debugged the hso_registrant enteries.","AlbertUlysses","closed",,,"","","",", , 89","false" +88,"bugfix: fixed exempt script","fixed exempt table to match the rest of the database","AlbertUlysses","closed",,,"","","",", , 88","false" +87,"fixbug: fixed bug in categorically inelligible","fixed categoically ineligible bug","AlbertUlysses","closed",,,"","","",", , 87","false" +86,"bug fix: fixed bug for complaints data","fixed complaints data to not have nulls and to not allow any state that don't use two letter abbreviation. ","AlbertUlysses","closed",,,"","","",", , 86","false" +85,"refactor/debug: debugging each script","Refactored tot script to make sure it is uploading to sqlite correctly.","AlbertUlysses","closed",,,"","","",", , 85","false" +84,"re-refactor normalize address","normalize address wrapper returns none as default again because that's how the library handle it, shouldn't return two different options.","AlbertUlysses","closed",,,"","","",", , 84","false" +83,"refactored","refactoring some transformation files to fix bug","AlbertUlysses","closed",,,"","","",", , 83","false" +82,"feat: hso_registrant script","HSO_Registrant script is complete. 
+","AlbertUlysses","closed",,,"","","",", , 82","false" +81,"refactor: no code change files moved","I moved some files around to better reflect where they belong. +For exapmle, assessor table file will be move to processingscripts folder because the code is unique to the assessor dataset. +However, normalize_address_wrapper will stay in the transformations folder because multiple scripts use this. +Asssesor file (and builds) will now have the custom code and later include the processing code as well.","AlbertUlysses","closed",,,"","","",", , 81","false" +79,"refactor: add a new file to handle multiple files","Remove old code that does multiple files for a job and include a new module that handles that.","AlbertUlysses","closed",,,"","","",", , 79","false" +78,"feat: new script","Script that enters one fine stay data into platform table but has a different column layout then the other one fine stay sheets.","AlbertUlysses","closed",,,"","","",", , 78","false" +77,"refactor: include steps for entire folder","Added some new code that helps with multiple fies in a folder. This should be moved into it's own module in the future.","AlbertUlysses","closed",,,"","","",", , 77","false" +76,"refactor: move scripts to new folder","move all scripting files to processingscript folder and leave helper functions in the transformations folder.","AlbertUlysses","closed",,,"","","",", , 76","false" +75,"feat: platform one fine stay","Adding the custom script for uploading one fine stay data into platform database.","AlbertUlysses","closed",,,"","","",", , 75","false" +74,"refactor: refactor platform luxly","refactor the platform luxly file to make it more readable and closer allign with wemake style guide.","AlbertUlysses","closed",,,"","","",", , 74","false" +73,"docs: update license","updated the license to reflect Hack For LA's best practices. ","AlbertUlysses","closed",,,"","","",", , 73","false" +72,"feat: platform table luxly files","Completed script for loading luxly files into the platform database.","AlbertUlysses","closed",,,"","","",", , 72","false" +71,"feat: add hso revoked table","The script for inserting HSO Revoked data is complete.","AlbertUlysses","closed",,,"","","",", , 71","false" +70,"feat: exempt processing script","The processing script for the ""exempt"" dataset is done.","AlbertUlysses","closed",,,"","","",", , 70","false" +69,"feat: noncompliant script","Add script that inserts noncompliant data into database.","AlbertUlysses","closed",,,"","","",", , 69","false" +68,"feat: categorically inelligible","added a new script for the categorically ineligible dataset.","AlbertUlysses","closed",,,"","","",", , 68","false" +67,"feat: add complaints script","Added a new script that inputs Complaints data into the database. +","AlbertUlysses","closed",,,"","","",", , 67","false" +64,"feat: tot insert file","New insert script complete. +","AlbertUlysses","closed",,,"","","",", , 64","false" +63,"feat: add new abstraction function","refactored some code and created a new abstraction function for inserting data. ","AlbertUlysses","closed",,,"","","",", , 63","false" +62,"quick fix","removed absolute path : quick fix","AlbertUlysses","closed",,,"","","",", , 62","false" +61,"feat: hso_denials insert function","Created a new file for hso_denial table, Added test for the custom functions. Added the code to insert the hso_denials data into the db. 
+1st dataset that's completely done +Need to go back to refactor the code because it's ugly","AlbertUlysses","closed",,,"","","",", , 61","false" +60,"build(deps): bump jupyterlab from 2.1.5 to 2.2.10 in /311-data/mapillarywrapper","Bumps [jupyterlab](https://github.com/jupyterlab/jupyterlab) from 2.1.5 to 2.2.10. +
+Commits +
    +
  • 87fff87 New version
  • +
  • 8b88bd1 update canvas version to allow build
  • +
  • d5e3649 Merge pull request from GHSA-4952-p58q-6crx
  • +
  • 9a8dadf Publish 2.2.9
  • +
  • 51fe0db bump version
  • +
  • e8e144b New version
  • +
  • a67a68e Merge pull request #9211 from meeseeksmachine/auto-backport-of-pr-9189-on-2.2.x
  • +
  • 1c7d14e Merge pull request #9173 from datalayer-contrib/2-2-x/revert-perf
  • +
  • b8c5203 Backport PR #9189: Update session and kernel manager data only if there was a...
  • +
  • bbc2959 Merge pull request #9168 from karlaspuldaro/notebook-kernel-display-2.2.x
  • +
  • Additional commits viewable in compare view
  • +
+
+
+ + +[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=jupyterlab&package-manager=pip&previous-version=2.1.5&new-version=2.2.10)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) + +Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. + +[//]: # (dependabot-automerge-start) +[//]: # (dependabot-automerge-end) + +--- + +
+Dependabot commands and options +
+ +You can trigger Dependabot actions by commenting on this PR: +- `@dependabot rebase` will rebase this PR +- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it +- `@dependabot merge` will merge this PR after your CI passes on it +- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it +- `@dependabot cancel merge` will cancel a previously requested merge and block automerging +- `@dependabot reopen` will reopen this PR if it is closed +- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually +- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) +- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language +- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language +- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language +- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language + +You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). + +
","dependabot[bot]","closed",,,"dependencies","","",", , 60","false" +58,"docs: adding COPYING file","removed some old jupyter files and added a license file which is called COPYING per GNU's suggestion.","AlbertUlysses","closed",,,"","","",", , 58","false" +57,"Feat: add Airbnb tables","added airbnb tables and updated SQL Alchemy relationships.","AlbertUlysses","closed",,,"","","",", , 57","false" +56,"feat: add SQL Alchemy models/Database","added SQL Alchemy Models and the Database code. ","AlbertUlysses","closed",,,"","","",", , 56","false" +55,"docs: rewrote some docstrings etc","updated some doc strings in files to better reflect the intent.","AlbertUlysses","closed",,,"","","",", , 55","false" +54,"feat: new code for date entries","I added new code and tests for formatting date columns. ","AlbertUlysses","closed",,,"","","",", , 54","false" +53,"refactor: update assessor table","Did some refactoring that addressed these issues: + +- Rewrote the code to match the WeMake style guide. +- The functions are faster, and the memory is more efficient by ten times on average. +- The functions are pure now, with no added consequence to the DataFrames or Series passed into them. +All tests are still passing.","AlbertUlysses","closed",,,"","","",", , 53","false" +52,"docs: re-wrote some of the normalize_address information","Docs improvement for normailze_address","AlbertUlysses","closed",,,"","","",", , 52","false" +51,"refactored transformation scripts","refactored a lot and added extra notes in read me ","AlbertUlysses","closed",,,"","","",", , 51","false" +50,"tot update/completion","ToT table is done. Refactored it a bit - still passing test +","AlbertUlysses","closed",,,"","","",", , 50","false" +49,"work on tot table","WIP - started work on TOT table and tests I will finish in tomorrow and start on a new table.","AlbertUlysses","closed",,,"","","",", , 49","false" +48,"Add expemtion table transformations","I added the transformation functions for exempt table with their tests, renamed the main folder.","AlbertUlysses","closed",,,"","","",", , 48","false" +47,"updating folder layout and adding transformation scripts with tests","introduces the first of a few scripts to the project along with some tests. Needs refactoring but all tests are passing. + +","AlbertUlysses","closed",,,"","","",", , 47","false" +43,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /311-data/mapillarywrapper","Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.24.3 to 1.26.5. +
+Release notes +

Sourced from urllib3's releases.

+
+

1.26.5

+

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

+
    +
  • Fixed deprecation warnings emitted in Python 3.10.
  • +
  • Updated vendored six library to 1.16.0.
  • +
  • Improved performance of URL parser when splitting the authority component.
  • +
+

If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors

+

1.26.4

+

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

+
    +
  • Changed behavior of the default SSLContext when connecting to HTTPS proxy during HTTPS requests. The default SSLContext now sets check_hostname=True.
  • +
+

If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors

+

1.26.3

+

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

+
    +
  • +

    Fixed bytes and string comparison issue with headers (Pull #2141)

    +
  • +
  • +

    Changed ProxySchemeUnknown error message to be more actionable if the user supplies a proxy URL without a scheme (Pull #2107)

    +
  • +
+

If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors

+

1.26.2

+

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

+
    +
  • Fixed an issue where wrap_socket and CERT_REQUIRED wouldn't be imported properly on Python 2.7.8 and earlier (Pull #2052)
  • +
+

1.26.1

+

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

+
    +
  • Fixed an issue where two User-Agent headers would be sent if a User-Agent header key is passed as bytes (Pull #2047)
  • +
+

1.26.0

+

:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap

+
    +
  • +

    Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)

    +
  • +
  • +

    Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that +still wish to use TLS earlier than 1.2 without a deprecation warning +should opt-in explicitly by setting ssl_version=ssl.PROTOCOL_TLSv1_1 (Pull #2002) +Starting in urllib3 v2.0: Connections that receive a DeprecationWarning will fail

    +
  • +
  • +

    Deprecated Retry options Retry.DEFAULT_METHOD_WHITELIST, Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST +and Retry(method_whitelist=...) in favor of Retry.DEFAULT_ALLOWED_METHODS, +Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT, and Retry(allowed_methods=...) +(Pull #2000) Starting in urllib3 v2.0: Deprecated options will be removed

    +
  • +
+ +
+

... (truncated)

+
+
+Changelog +

Sourced from urllib3's changelog.

+
+

1.26.5 (2021-05-26)

+
    +
  • Fixed deprecation warnings emitted in Python 3.10.
  • +
  • Updated vendored six library to 1.16.0.
  • +
  • Improved performance of URL parser when splitting +the authority component.
  • +
+

1.26.4 (2021-03-15)

+
    +
  • Changed behavior of the default SSLContext when connecting to HTTPS proxy +during HTTPS requests. The default SSLContext now sets check_hostname=True.
  • +
+

1.26.3 (2021-01-26)

+
    +
  • +

    Fixed bytes and string comparison issue with headers (Pull #2141)

    +
  • +
  • +

    Changed ProxySchemeUnknown error message to be +more actionable if the user supplies a proxy URL without +a scheme. (Pull #2107)

    +
  • +
+

1.26.2 (2020-11-12)

+
    +
  • Fixed an issue where wrap_socket and CERT_REQUIRED wouldn't +be imported properly on Python 2.7.8 and earlier (Pull #2052)
  • +
+

1.26.1 (2020-11-11)

+
    +
  • Fixed an issue where two User-Agent headers would be sent if a +User-Agent header key is passed as bytes (Pull #2047)
  • +
+

1.26.0 (2020-11-10)

+
    +
  • +

    NOTE: urllib3 v2.0 will drop support for Python 2. +Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>_.

    +
  • +
  • +

    Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)

    +
  • +
  • +

    Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that +still wish to use TLS earlier than 1.2 without a deprecation warning

    +
  • +
+ +
+

... (truncated)

+
+
+Commits +
    +
  • d161647 Release 1.26.5
  • +
  • 2d4a3fe Improve performance of sub-authority splitting in URL
  • +
  • 2698537 Update vendored six to 1.16.0
  • +
  • 07bed79 Fix deprecation warnings for Python 3.10 ssl module
  • +
  • d725a9b Add Python 3.10 to GitHub Actions
  • +
  • 339ad34 Use pytest==6.2.4 on Python 3.10+
  • +
  • f271c9c Apply latest Black formatting
  • +
  • 1884878 [1.26] Properly proxy EOF on the SSLTransport test suite
  • +
  • a891304 Release 1.26.4
  • +
  • 8d65ea1 Merge pull request from GHSA-5phf-pp7p-vc2r
  • +
  • Additional commits viewable in compare view
  • +
+
+
+ + +[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=urllib3&package-manager=pip&previous-version=1.24.3&new-version=1.26.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) + +Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. + +[//]: # (dependabot-automerge-start) +[//]: # (dependabot-automerge-end) + +--- + +
+Dependabot commands and options +
+ +You can trigger Dependabot actions by commenting on this PR: +- `@dependabot rebase` will rebase this PR +- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it +- `@dependabot merge` will merge this PR after your CI passes on it +- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it +- `@dependabot cancel merge` will cancel a previously requested merge and block automerging +- `@dependabot reopen` will reopen this PR if it is closed +- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually +- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) +- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) +- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language +- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language +- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language +- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language + +You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts). + +
","dependabot[bot]","closed",,,"dependencies","","",", , 43","false" +42,"Update Pedestrian Data folder","Pedestrian Data folder including the relevant notebooks and visualizations.","dplem","closed",,,"","","",", , 42","false" +41,"Push request redo preprocessing files","","KarinaLopez19","closed",,,"","","AlbertUlysses",", , 41","false" +40,"Extend Mapillary API wrapper","These are new features for the python wrapper for Mapillary's API to allow it to: + +- Make requests of other layers of Mapillary data beyond traffic signs + +- Store photo image IDs associated with each item on map + +- Parse Mapillary's latitude/longitude string format + +- Give feedback while downloading data and timeout after 5 minutes, to prevent downloads from failing silently","henrykaplan","closed",,,"","henrykaplan","dplem,AlbertUlysses",", , 40","false" +39,"add bbox function","Created bbox function to help create a boundary box for cleaning data. Added one test to ensure any refactoring in the future still returns the correct expected results. ","AlbertUlysses","closed",,,"","","",", , 39","false" +38,"this is a test","","KarinaLopez19","closed",,,"","","",", , 38","false" +37,"add new folder with new functions to help with clean up","Adding the folder for airbnb listing and first function for helper functions that will be used for cleaning the data +","AlbertUlysses","closed",,,"","","",", , 37","false" +35,"clean up jupyter notebook","cleaned up the jupyter notebook to make it more user friendly","AlbertUlysses","closed",,,"","","",", , 35","false"