diff --git a/.github/workflows/issues-to-csv.yml b/.github/workflows/issues-to-csv.yml
new file mode 100644
index 0000000..a02c1a7
--- /dev/null
+++ b/.github/workflows/issues-to-csv.yml
@@ -0,0 +1,68 @@
+name: List Issues and Output as CSV
+
+on:
+ push:
+ branches:
+ - n2020h-issues-to-csv
+ workflow_dispatch:
+ schedule:
+ - cron: '*/5 * * * *'
+ issues:
+ types: [opened, closed, reopened]
+ pull_request:
+ types: [opened, closed, reopened]
+ branches:
+ - n2020h-issues-to-csv
+
+
+jobs:
+ list-issues:
+ runs-on: ubuntu-latest
+ steps:
+ # Checkout the repo to access any scripts or tools you might need
+ - name: Checkout repository
+ uses: actions/checkout@v3
+
+ # Generate issues CSV
+ # - name: Generate issues CSV
+ # run: |
+ # echo "Issue Number,Title,Labels" > issues.csv
+ # curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
+ # "https://api.github.com/repos/${{ github.repository }}/issues?state=all&per_page=100" | \
+ # jq -r '.[] | select(.pull_request == null) | [.number, .title, (.labels | map(.name) | join(","))] | @csv' >> issues.csv
+ - name: Generate issues CSV
+ run: |
+ echo "Issue Number,Title,Labels,State,User Login,User ID,Assignee Login,Assignee ID" > issues.csv
+ curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
+ "https://api.github.com/repos/${{ github.repository }}/issues?state=all&per_page=100" | \
+ jq -r '.[] | select(.pull_request == null) | [
+ .number,
+ .title,
+ (.labels | map(.name) | join(",")),
+ .state,
+ .user.login,
+ .user.id,
+ (.assignee | if . != null then .login else "None" end),
+ (.assignee | if . != null then .id else "None" end)
+ ] | @csv' >> issues.csv
+ # Generate issues CSV
+
+ - name: Commit and push
+ run: |
+ git config user.name "Automated"
+ git config user.email "actions@users.noreply.github.com"
+ git add -f issues.csv
+ timestamp=$(date -u)
+ git commit -m "Latest data: ${timestamp}" || exit 0
+ git push --force origin HEAD:refs/heads/n2020h-issues-to-csv
+
+ # # Upload CSV as artifact
+ # - name: Upload CSV as artifact
+ # uses: actions/upload-artifact@v2
+ # with:
+ # name: issues-csv
+ # path: issues.csv
+
+
+ # name: issues-csv
+ # path: issues.csv
diff --git a/.github/workflows/pull_requests_to_csv.yml b/.github/workflows/pull_requests_to_csv.yml
new file mode 100644
index 0000000..99e8554
--- /dev/null
+++ b/.github/workflows/pull_requests_to_csv.yml
@@ -0,0 +1,244 @@
+name: List Pull Requests and Output as CSV
+
+on:
+ push:
+ branches:
+ - n2020h-issues-to-csv
+ workflow_dispatch:
+ schedule:
+ - cron: '0 0 * * *' # Runs daily at midnight
+
+jobs:
+ list-pull-requests:
+ runs-on: ubuntu-latest
+
+ steps:
+ # Checkout the repository to access any scripts or tools you might need
+ - name: Checkout repository
+ uses: actions/checkout@v3
+
+ # Set up Node.js to use jq command
+ - name: Set up Node.js
+ uses: actions/setup-node@v3
+ with:
+ node-version: '20'
+
+ # Fetch pull requests data and save it to pulls.json
+ - name: Fetch pull requests data
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ curl -H "Authorization: token $GITHUB_TOKEN" \
+ -H "Accept: application/vnd.github.v3+json" \
+ "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&per_page=100" \
+ -o pulls.json
+
+ # Fetch linked issues for each PR
+ - name: Fetch linked issues for each PR
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ for pr_number in $(jq -r '.[].number' pulls.json); do \
+ curl -H "Authorization: token $GITHUB_TOKEN" \
+ -H "Accept: application/vnd.github.v3+json" \
+ "https://api.github.com/repos/${{ github.repository }}/issues/$pr_number/timeline?per_page=100" \
+ -o "timeline_$pr_number.json"; \
+ done
+
+ # Debugging: Display timeline JSON for a PR
+ - name: Display timeline JSON content for a PR
+ run: cat timeline_*.json
+
+
+ # # Generate pull requests CSV including linked issues
+ # - name: Generate pull requests CSV including linked issues
+ # run: |
+ # echo "PR Number,Title,Author,State,Labels,Assignees,Reviewers,Linked Issues" > pull_requests.csv
+
+ # for pr_number in $(jq -r '.[].number' pulls.json); do
+ # timeline_file="timeline_$pr_number.json"
+
+ # # Ensure the timeline file is not empty before processing
+ # if [ -s "$timeline_file" ]; then
+ # linked_issues=$(jq -r '[.[] | select(.event == "cross-referenced" and .source.issue) | .source.issue.number | tostring] | join(", ")' "$timeline_file")
+ # else
+ # linked_issues=""
+ # fi
+
+ # jq -r --arg linked_issues "$linked_issues" \
+ # '.[] | select(.number == '$pr_number') | [
+ # .number,
+ # .title,
+ # .user.login,
+ # .state,
+ # (.labels | map(.name) | join(",")),
+ # (.assignees | map(.login) | join(",")),
+ # (.requested_reviewers | map(.login) | join(",")),
+ # $linked_issues
+ # ] | @csv' pulls.json >> pull_requests.csv
+ # done
+ - name: Generate pull requests CSV including linked issues and has_issues
+ run: |
+ echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues,Has Issues" > pull_requests.csv
+
+ for pr_number in $(jq -r '.[].number' pulls.json); do
+ timeline_file="timeline_$pr_number.json"
+
+ if [ -f "$timeline_file" ]; then
+            linked_issues_timeline=$(jq -r '[.[] | select(.event == "cross-referenced" and .source.issue) | .source.issue.url | capture("/(?<issue_number>\\d+)$").issue_number] | join(", ")' "$timeline_file")
+ else
+ linked_issues_timeline=""
+ fi
+
+          linked_issues_body=$(jq -r '.[] | select(.number == '$pr_number') | .body // "" | [scan("#\\d+") | ltrimstr("#")] | join(",")' pulls.json || echo "")
+
+          linked_issues_reactions=$(jq -r '.[] | select(.number == '$pr_number') | .reactions.url | capture("/issues/(?<issue_number>\\d+)/reactions").issue_number' pulls.json || echo "")
+
+          linked_issues_direct=$(jq -r '.[] | select(.number == '$pr_number') | .issue_url | capture("/issues/(?<issue_number>\\d+)$").issue_number' pulls.json || echo "")
+
+ linked_issues=$(echo "$linked_issues_timeline, $linked_issues_body, $linked_issues_reactions, $linked_issues_direct" | sed 's/^, //; s/, $//; s/,,/,/g')
+
+          has_issues=$(jq -r '.[] | select(.number == '$pr_number') | .base.repo.has_issues' pulls.json || echo "false")
+
+ jq -r --arg linked_issues "$linked_issues" --arg has_issues "$has_issues" \
+ '.[] | select(.number == '$pr_number') | [
+ .number,
+ .title,
+ .body,
+ .user.login,
+ .state,
+ .commits,
+ .changed_files,
+ (.labels | map(.name) | join(",")),
+ (.assignees | map(.login) | join(",")),
+ (.requested_reviewers | map(.login) | join(",")),
+ $linked_issues,
+ $has_issues
+ ] | @csv' pulls.json >> pull_requests.csv
+ done
+
+ # Check the content of pull_requests.csv for debugging
+ - name: Display pull_requests.csv content
+ run: cat pull_requests.csv
+
+ # Commit and push the generated CSV to the repository
+ - name: Commit and push CSV
+ run: |
+ git config user.name "Automated"
+ git config user.email "actions@users.noreply.github.com"
+ git add -f pull_requests.csv
+ timestamp=$(date -u)
+ git commit -m "Latest pull requests data: ${timestamp}" || exit 0
+ git push --force origin HEAD:refs/heads/n2020h-issues-to-csv
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+
+##------------------------------------------##
+# name: List Pull Requests and Output as CSV
+
+# on:
+# push:
+# branches:
+# - n2020h-issues-to-csv
+# workflow_dispatch:
+# schedule:
+# - cron: '0 0 * * *' # Runs daily at midnight
+# # pull_request:
+# # types: [opened, closed, reopened]
+# # branches:
+# # - n2020h-issues-to-csv
+# jobs:
+# list-pull-requests:
+# runs-on: ubuntu-latest
+
+# steps:
+# # Checkout the repository to access any scripts or tools you might need
+# - name: Checkout repository
+# uses: actions/checkout@v3
+
+# # Set up Node.js to use jq command
+# - name: Set up Node.js
+# uses: actions/setup-node@v3
+# with:
+# node-version: '20'
+# # Fetch pull requests data and save it to pulls.json
+# - name: Fetch pull requests data
+# env:
+# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+# run: |
+# curl -H "Authorization: token $GITHUB_TOKEN" \
+# -H "Accept: application/vnd.github.v3+json" \
+# "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&per_page=100" \
+# -o pulls.json
+
+# # Check the content of pulls.json for debugging
+# - name: Display pulls.json content
+# run: cat pulls.json
+
+# # Generate pull requests CSV
+# # (.body | capture_all("#(?\\d+)"; "g") | join(","))
+# - name: Generate pull requests CSV
+# run: |
+# echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers, Linked Issues" > pull_requests.csv
+# jq -r '.[] | select(.user.login != "dependabot[bot]") | [
+# .number,
+# .title,
+# .body,
+# .user.login,
+# .state,
+# .commits,
+# .changed_files,
+# (.labels | map(.name) | join(",")),
+# (.assignees | map(.login) | join(",")),
+# (.requested_reviewers | map(.login) | join(",")),
+# (if .body != null then .body | gsub("#";" ") | split(" ") | map(select(startswith("issue_number"))) | join(",") else "" end)
+# ] | @csv' pulls.json >> pull_requests.csv
+
+# # Check the content of pull_requests.csv for debugging
+# - name: Display pull_requests.csv content
+# run: cat pull_requests.csv
+
+# # Commit and push the generated CSV to the repository
+# - name: Commit and push CSV
+# run: |
+# git config user.name "Automated"
+# git config user.email "actions@users.noreply.github.com"
+# git add -f pull_requests.csv
+# timestamp=$(date -u)
+# git commit -m "Latest pull requests data: ${timestamp}" || exit 0
+# git push --force origin HEAD:refs/heads/n2020h-issues-to-csv
+# env:
+# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+####-------------------------------------------------#######
+
+ # # Generate pull requests CSV
+ # - name: Generate pull requests CSV
+ # run: |
+ # run: |
+ # echo "PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers" > hackforla_PRs.csv
+ # jq -r '.[] | [
+ # .number,
+ # .title,
+ # .body,
+ # .user.login,
+ # .state,
+ # .commits,
+ # .changed_files,
+ # (.labels | map(.name) | join(",")),
+ # (.assignees | map(.login) | join(",")),
+ # (.requested_reviewers | map(.login) | join(","))
+ # ] | @csv' pulls.json >> hackforla_PRs.csv
+
+ # # Commit and push the generated CSV to the repository
+ # - name: Commit and push CSV
+ # run: |
+ # git config user.name "Automated"
+ # git config user.email "actions@users.noreply.github.com"
+ # git add -f hackforla_PRs.csv
+ # timestamp=$(date -u)
+ # git commit -m "Latest data: ${timestamp}" || exit 0
+ # git push --force origin HEAD:refs/heads/n2020h-issues-to-csv
+ # env:
+ # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/issues.csv b/issues.csv
new file mode 100644
index 0000000..da09118
--- /dev/null
+++ b/issues.csv
@@ -0,0 +1,53 @@
+Issue Number,Title,Labels,State,User Login,User ID,Assignee Login,Assignee ID
+209,"[Job Match] Explore data, perform EDA and relevant data science work","","open","sudhara",7834544,"sudhara",7834544
+207,"LA Controller Data Projects","CoP: Data Science","open","salice",4333657,"FragariaChemist",103977933
+206,"LA Metro Real Time Transit Data","CoP: Data Science","open","salice",4333657,"tpham16",110870494
+204,"Recruit volunteers for team open roles","role: missing,epic,ready for product,size: 0.25pt,feature: recruiting","open","akhaleghi",7635911,"None","None"
+203,"Prep project boards for Migration","role: product","open","akhaleghi",7635911,"akhaleghi",7635911
+202,"Information for New and Existing Members","CoP: Data Science","open","akhaleghi",7635911,"None","None"
+200,"EPA Data Set","","open","akhaleghi",7635911,"KarinaLopez19",22568552
+198,"Update the About us page on wiki","role: product,feature: onboarding,project duration: one time,ready for product,project: Data Science CoP Maintenance,size: 0.25pt","open","ExperimentsInHonesty",37763229,"max1million101",122141183
+194,"Create data dictionary (EDA task)","feature: missing,role: missing,size: missing,project: missing","open","Lalla22",47159210,"None","None"
+193,"DRAFT: Access to ""Third Spaces""","feature: missing,role: missing,size: missing,project: missing","open","akhaleghi",7635911,"None","None"
+191,"DRAFT: MediaWiki NLP Project","feature: missing,role: missing,size: missing,project: missing","open","akhaleghi",7635911,"salice",4333657
+190,"Create Base64 and Env File Tutorial","feature: missing,role: missing,size: missing,project: missing","open","kimberlytanyh",76601090,"None","None"
+189,"Refining the Requirements for Skills Match Dashboard","role: missing,size: missing,project: missing,feature: skills / productivity dashboard","open","n2020h",72112832,"n2020h",72112832
+183,"MERL Center Data Cleaning","role: missing,project duration: one time,size: 1pt,project: MC Southern Africa NGOs' OS usage","closed","Rabia2219",75643389,"None","None"
+182,"CoP: Data Science: Active and Inactive Businesses of LA County","role: data science,epic,size: 3pt,project: EDA","open","akhaleghi",7635911,"rahul897",5197842
+181,"CoP: Data Science: Affordable Housing Access to Food Pantries","role: data science,epic,size: 3pt,project: EDA","closed","akhaleghi",7635911,"MDivyaPrakash",22434989
+180,"City of Los Angeles Arrests","role: data science,epic,size: 3pt,project: EDA","open","akhaleghi",7635911,"dolla24",13824693
+179,"CoP: Data Science: City of Los Angeles Evictions","role: data science,epic,size: 3pt,project: EDA","closed","akhaleghi",7635911,"rahul897",5197842
+178,"CoP: Data Science: Find data sources through the LA Controller's office","documentation,good first issue,role: data analysis,size: 1pt,project: missing","open","akhaleghi",7635911,"RomyPatel",54936257
+177,"Create 311 data CSV files that can be accessed through a Jupyter notebook","role: data science,project: 311 Data,size: 3pt,feature: project management","open","akhaleghi",7635911,"mru-hub",105573589
+170,"California Grants Portal Data Set EDA","role: data analysis,project duration: ongoing,size: 1pt,project: EDA","closed","akhaleghi",7635911,"jossus657",97187879
+169,"Potential Projects: California Grants Portal","role: data science,epic,size: epic,dependency,project: EDA","closed","akhaleghi",7635911,"mihikasahani",102847564
+163,"CoP: Data Science: Complete Intro to Command Line and Git Tutorial","feature: guide,role: data science,size: 1pt,role: Data Engineer","open","akhaleghi",7635911,"ZeelDesai00",119442574
+162,"CoP: Data Science: Complete Intro to Python Tutorial","feature: guide,role: data science,size: 1pt,draft,ready for product","open","akhaleghi",7635911,"SaiPranaswi23",130614655
+161,"Project Name: Lucky Parking","feature: missing,role: data analysis,size: 1pt,project: missing","closed","gregpawin",36276149,"None","None"
+157,"CoP: Data Science: Create Deep Learning Tutorial","documentation,feature: guide,role: data science,size: 1pt","open","akhaleghi",7635911,"elliealbertson",120353163
+156,"CoP: Data Science: Create Machine Learning Tutorial","documentation,feature: guide,role: data science,size: 1pt","open","akhaleghi",7635911,"BhavanaSai12",173202606
+155,"CoP: Data Science: Create Stats Tutorial","documentation,feature: guide,role: data science,role: data analysis,size: 1pt","open","akhaleghi",7635911,"nehathombare21",163942514
+154,"CoP: Data Science: Create Data Ops Tutorial","documentation,feature: guide,size: 1pt,role: Data Engineer","open","akhaleghi",7635911,"endlesslupita",6325722
+153,"CoP: Data Science: Create Text Analysis Tutorial","documentation,feature: guide,role: data analysis,size: 1pt","open","akhaleghi",7635911,"bfang22",111911687
+152,"CoP: Data Science: Open Source Projects Data Set for Climate projects","role: data analysis,size: 1pt,epic: GreenEarthOS,project: climate projects,draft","open","akhaleghi",7635911,"noneill256",100643509
+149,"Weekly Label Check","role: product,size: 1pt,feature: project management","open","ExperimentsInHonesty",37763229,"None","None"
+160,"CoP: Data Science: Survey: Repo Labels","project duration: ongoing,size: 2pt,time sensitive,role: Data Engineer,feature: labels,feature: Issues Dashboard","closed","Neecolaa",7437035,"jossus657",97187879
+148,"Create Geospatial Data Analysis Tutorial","documentation,feature: guide,role: org,size: 1pt","closed","ryanmswan",2508797,"mcmorgan27",3988983
+147,"Create Data Engineering Tutorial","documentation,feature: guide,role: org,size: 1pt","closed","ryanmswan",2508797,"jonmelvin",25702027
+146,"Create Web Scraping Tutorial","documentation,feature: guide,role: org,size: 1pt","open","ryanmswan",2508797,"parcheesime",38143160
+145,"CoP: Data Science: Create Data Analysis With R Tutorial","documentation,feature: guide,role: data analysis,size: 1pt","open","ryanmswan",2508797,"xuyeluo",169718490
+144,"Create Data Visualization Tutorial","documentation,feature: guide,role: org,size: 1pt,feature: needs peer review","closed","ryanmswan",2508797,"None","None"
+143,"Create ETL/Data Cleaning Tutorial","documentation,feature: guide,role: org,size: 1pt","closed","ryanmswan",2508797,"ryanmswan",2508797
+141,"Obtain Shape Files for Different District Types (2023)","good first issue,role: data analysis,project duration: one time,dependency,size: 1pt,project: district types","closed","akhaleghi",7635911,"mru-hub",105573589
+140,"Create a logo for the Data Science CoP","project duration: one time,size: 2pt,feature: branding,role: design","closed","henrykaplan",50933869,"None","None"
+138,"Starter Project for New Data Analysts and Data Scientists","role: product,role: CoP lead,feature: onboarding,size: epic,dependency","closed","akhaleghi",7635911,"None","None"
+137,"Data Science CoP Meeting Agendas (Monday 7PM PST)","feature: guide,role: product,size: 1pt,feature: agenda","open","akhaleghi",7635911,"akhaleghi",7635911
+135,"CoP: Data Science: Find and document all the 311 public data sets","role: data analysis,project: 311 Data,size: 2pt,epic: 311 public data sets","open","ExperimentsInHonesty",37763229,"venkata-sai-swathi",61697502
+134,"Overview Dashboard - add titles to graphs","feature: guide,role: data science,project: 311-data-dashboards,size: 1pt,epic: 311 public data sets","closed","chelseybeck",64881557,"chelseybeck",64881557
+133,"Create labels, features, milestones","role: product,dependency,size: 3pt,feature: project management","open","ExperimentsInHonesty",37763229,"None","None"
+131,"Story Size for Data Science Issues","role: product,size: 1pt,feature: project management","closed","akhaleghi",7635911,"akhaleghi",7635911
+130,"Create a Guide: Web Scraping","Guide: Research,feature: guide,role: org,size: 5pt,CoP: Data Science","open","akhaleghi",7635911,"parcheesime",38143160
+124,"Obtain Shape Files for Different District Types as of Nov/Dec 2021","feature: guide,role: data analysis,project duration: one time,size: 1pt,project: district types","closed","akhaleghi",7635911,"None","None"
+121,"Template For Creating Epics For Data Science Projects","role: product,size: 1pt,feature: project management","closed","akhaleghi",7635911,"akhaleghi",7635911
+120,"Structured Context for HfLA created data sets","feature: guide,role: data science,size: epic,project: structured context,TG: Draft Template,CoP: Data Science","open","ExperimentsInHonesty",37763229,"None","None"
+118,"CoP: Data Science: Create district types reusable tool (API, single dataset, etc.)","feature: guide,role: data analysis,epic,size: epic,size: 1pt","open","ExperimentsInHonesty",37763229,"parcheesime",38143160
diff --git a/my_org_PRs.csv b/my_org_PRs.csv
new file mode 100644
index 0000000..4c2fa8f
--- /dev/null
+++ b/my_org_PRs.csv
@@ -0,0 +1,154 @@
+PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers
+205,"Added data loading and cleaning Jupyter notebook","- Initial implementation of data loading/ saving and cleaning.
+- Improvements planned for future versions.","mru-hub","closed",,,"","",""
+201,"feat: City of Los Angeles Evictions #179","This commit introduces basic EDA on the LA evictions data, It introduces explode/splitting of Just Cause column for effective analysis
+
+starting #179 ","rahul897","open",,,"","",""
+199,"177 create 311 data csv files that can be accessed through a jupyter notebook","Cleaning rules document from the 311-data","mru-hub","closed",,,"","","salice"
+197,"N2020h issues to csv",,"n2020h","open",,,"","",""
+196,"186 Adding Crime Data",,"dolla24","closed",,,"","dolla24",""
+195,"fix name of image to work with Windows",,"salice","closed",,,"","",""
+192,"Create issues-to-csv.yml","testing feature branch with draft pull request","n2020h","closed",,,"","",""
+188,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Partially fixes https://github.com/hackforla/website/issues/6139
+
+### What changes did you make?
+- Adding Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo (commit: https://github.com/hackforla/access-the-data/commit/62609601277509b0473bbf8f6dbf1ef735430c84) to this repo
+
+### Why did you make the changes?
+- Requested by Bonnie per [this comment](https://github.com/hackforla/website/issues/5127#issuecomment-1776306588)
+- AJ Price, Access the Data's PM, didn't want Jupyter Notebook to show up as a Language for the project, so Bonnie recommended moving the Workshop-Feedback_Bigram_Analysis.ipynb file to the Data Science repo.
+
+### Additional Notes
+- I recommend using rebase merging to preserve who authored the file.
+- To see examples of the commit history of the various methods of merging, see these branches in my forked repo:
+ - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended
+ - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits
+ - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","",""
+187,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Fixes N/A -
+
+### What changes did you make?
+- Adding Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo (commit: https://github.com/hackforla/access-the-data/commit/62609601277509b0473bbf8f6dbf1ef735430c84) to this repo
+
+### Why did you make the changes?
+- Requested by Bonnie per [this comment](https://github.com/hackforla/website/issues/5127#issuecomment-1776306588)
+- AJ Price, Access the Data's PM, didn't want Jupyter Notebook to show up as a Language for the project, so Bonnie recommended moving the Workshop-Feedback_Bigram_Analysis.ipynb file to the Data Science repo.
+
+### Additional Notes
+- I recommend using rebase merging to preserve who authored the file.
+- To see examples of the commit history of the various methods of merging, see these branches in my forked repo:
+ - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended
+ - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits
+ - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","",""
+186,"Lalla la crime analysis 2",,"Lalla22","open",,,"","","salice"
+185,"Create README.md",,"Lalla22","closed",,,"","",""
+184,"git c",,"Lalla22","closed",,,"","",""
+168,"Updating label analysis jupyter notebook with co-occurrence analysis",,"rbianchetti","open",,,"","",""
+167,"Merge pull request #164 from hackforla/160-survey-repo-labels","A python script that gets all the labels used in open issues across a…","codemamma","open",,,"","",""
+164,"A python script that gets all the labels used in open issues across a…","…n organization(s) via GitHub API","rbianchetti","closed",,,"","",""
+159,"Linking latest pedestrian safety report presentation","This replaces a previous powerpoint presentation stored in the repo with links to the current report and presentation saved in Hack for LA's shared google drive.","henrykaplan","open",,,"","","akhaleghi"
+158,"Data Science logo links and image files","The latest version of the Hack for LA data science logo is being worked on in Google Drive. I've updated the directory in the git repo with links to the latest and with png and svg versions of the latest logo files.","henrykaplan","closed",,,"","","akhaleghi"
+139,"Removed spaces from all filenames in pedestrian data directory","Changed filenames of all output files and graphics. No changes to file contents — these are final graphics and reports, and aren't aren't referenced by file path anywhere.","henrykaplan","closed",,,"","",""
+132,"Pedestrian safety","Added readme documentation for downloading and organizing data, running Jupyter notebooks, and reorganized analysis files.","henrykaplan","closed",,,"","",""
+123,"Create a new issue template for epics",,"akhaleghi","closed",,,"","",""
+122,"Pedestrian safety draft visualizations and presentation","Further simplified and clarified maps and traffic accident charts, added these new charts and related discussion to the presentation file.","henrykaplan","closed",,,"","","AlbertUlysses"
+117,"Pedestrian safety","- Add optional parameter to mapillary wrapper client to allow data requests of recent data only, to quickly update existing datasets.
+- Better maps and visualizations.
+- Use DBScan clustering algorithm to find clusters of traffic accidents (simplest way to roughly group accidents at specific intersections, as the accident data is mostly based around intersections).
+- Updated report ppt for August 5 presentation to data science group","henrykaplan","closed",,,"","",""
+116,"webscraping folder initial commit","[Edited]
+
+This PR is a stand alone folder added to the 311-data folder. It contains a webscraping script for the tech stacks of each NC survey. See issue [44](https://github.com/hackforla/data-science/issues/44). The readme contains a link to the google sheets table of the tech used for each website.","rajindermavi","closed",,,"","",""
+115,"docs","updated readme to include sample queries that LAANE is interested in doing against the database","AlbertUlysses","closed",,,"","",""
+114,"docs","Update README/todos","AlbertUlysses","closed",,,"","",""
+113,"docs","Updated misc docs","AlbertUlysses","closed",,,"","",""
+111,"Docs!","Updated docs like readme and add extra notes to some other files","AlbertUlysses","closed",,,"","",""
+105,"debug","debugged some code that was entering incorrect addresses","AlbertUlysses","closed",,,"","",""
+104,"docs/clean up","updated README and removed some old files.","AlbertUlysses","closed",,,"","",""
+103,"feat: assessor script","Last script, passes tests refactored a bit","AlbertUlysses","closed",,,"","",""
+102,"docs","Update Readme
+removed a lot of personal notes aimed for me during script writing","AlbertUlysses","closed",,,"","",""
+101,"debug categorically inelligible","debug categorically inelligibl data","AlbertUlysses","closed",,,"","",""
+100,"New scripts + debug","added some code that debugs the ofs and luxly datasets.
+Added a new script that handles warnings and citations.","AlbertUlysses","closed",,,"","",""
+99,"bugfix","fixed bugs across 4 files that dealth with addresses that returned 0 for zipcode instead of a 5 number digit","AlbertUlysses","closed",,,"","",""
+98,"feat: new script for warning","This script is for warning files that don't have only addresses and dates.","AlbertUlysses","closed",,,"","",""
+97,"Two commits","First commit fixes the warning script file.
+The second commit adds an extra line describe what the script is for.
+","AlbertUlysses","closed",,,"","",""
+96,"feat: new processesing script","script for airbnb reviews is complete.","AlbertUlysses","closed",,,"","",""
+95,"feat: airbnb dataset script","airbnb script that uploads listings and host information ","AlbertUlysses","closed",,,"","",""
+93,"debug one fine stay scripts","debug one fine stay scripts for address2 errors","AlbertUlysses","closed",,,"","",""
+92,"debug","debugged the data from one fine stay.","AlbertUlysses","closed",,,"","",""
+91,"debug","Did some debugging for luxly platform script","AlbertUlysses","closed",,,"","",""
+90,"fixbug in hsodenials","hsodenials bugs are remved, hsodenials script is good to go.","AlbertUlysses","closed",,,"","",""
+89,"debug","Debugged the hso_registrant enteries.","AlbertUlysses","closed",,,"","",""
+88,"bugfix: fixed exempt script","fixed exempt table to match the rest of the database","AlbertUlysses","closed",,,"","",""
+87,"fixbug: fixed bug in categorically inelligible","fixed categoically ineligible bug","AlbertUlysses","closed",,,"","",""
+86,"bug fix: fixed bug for complaints data","fixed complaints data to not have nulls and to not allow any state that don't use two letter abbreviation. ","AlbertUlysses","closed",,,"","",""
+85,"refactor/debug: debugging each script","Refactored tot script to make sure it is uploading to sqlite correctly.","AlbertUlysses","closed",,,"","",""
+84,"re-refactor normalize address","normalize address wrapper returns none as default again because that's how the library handle it, shouldn't return two different options.","AlbertUlysses","closed",,,"","",""
+83,"refactored","refactoring some transformation files to fix bug","AlbertUlysses","closed",,,"","",""
+82,"feat: hso_registrant script","HSO_Registrant script is complete.
+","AlbertUlysses","closed",,,"","",""
+81,"refactor: no code change files moved","I moved some files around to better reflect where they belong.
+For exapmle, assessor table file will be move to processingscripts folder because the code is unique to the assessor dataset.
+However, normalize_address_wrapper will stay in the transformations folder because multiple scripts use this.
+Asssesor file (and builds) will now have the custom code and later include the processing code as well.","AlbertUlysses","closed",,,"","",""
+79,"refactor: add a new file to handle multiple files","Remove old code that does multiple files for a job and include a new module that handles that.","AlbertUlysses","closed",,,"","",""
+78,"feat: new script","Script that enters one fine stay data into platform table but has a different column layout then the other one fine stay sheets.","AlbertUlysses","closed",,,"","",""
+77,"refactor: include steps for entire folder","Added some new code that helps with multiple fies in a folder. This should be moved into it's own module in the future.","AlbertUlysses","closed",,,"","",""
+76,"refactor: move scripts to new folder","move all scripting files to processingscript folder and leave helper functions in the transformations folder.","AlbertUlysses","closed",,,"","",""
+75,"feat: platform one fine stay","Adding the custom script for uploading one fine stay data into platform database.","AlbertUlysses","closed",,,"","",""
+74,"refactor: refactor platform luxly","refactor the platform luxly file to make it more readable and closer allign with wemake style guide.","AlbertUlysses","closed",,,"","",""
+73,"docs: update license","updated the license to reflect Hack For LA's best practices. ","AlbertUlysses","closed",,,"","",""
+72,"feat: platform table luxly files","Completed script for loading luxly files into the platform database.","AlbertUlysses","closed",,,"","",""
+71,"feat: add hso revoked table","The script for inserting HSO Revoked data is complete.","AlbertUlysses","closed",,,"","",""
+70,"feat: exempt processing script","The processing script for the ""exempt"" dataset is done.","AlbertUlysses","closed",,,"","",""
+69,"feat: noncompliant script","Add script that inserts noncompliant data into database.","AlbertUlysses","closed",,,"","",""
+68,"feat: categorically inelligible","added a new script for the categorically ineligible dataset.","AlbertUlysses","closed",,,"","",""
+67,"feat: add complaints script","Added a new script that inputs Complaints data into the database.
+","AlbertUlysses","closed",,,"","",""
+64,"feat: tot insert file","New insert script complete.
+","AlbertUlysses","closed",,,"","",""
+63,"feat: add new abstraction function","refactored some code and created a new abstraction function for inserting data. ","AlbertUlysses","closed",,,"","",""
+62,"quick fix","removed absolute path : quick fix","AlbertUlysses","closed",,,"","",""
+61,"feat: hso_denials insert function","Created a new file for hso_denial table, Added test for the custom functions. Added the code to insert the hso_denials data into the db.
+1st dataset that's completely done
+Need to go back to refactor the code because it's ugly","AlbertUlysses","closed",,,"","",""
+58,"docs: adding COPYING file","removed some old jupyter files and added a license file which is called COPYING per GNU's suggestion.","AlbertUlysses","closed",,,"","",""
+57,"Feat: add Airbnb tables","added airbnb tables and updated SQL Alchemy relationships.","AlbertUlysses","closed",,,"","",""
+56,"feat: add SQL Alchemy models/Database","added SQL Alchemy Models and the Database code. ","AlbertUlysses","closed",,,"","",""
+55,"docs: rewrote some docstrings etc","updated some doc strings in files to better reflect the intent.","AlbertUlysses","closed",,,"","",""
+54,"feat: new code for date entries","I added new code and tests for formatting date columns. ","AlbertUlysses","closed",,,"","",""
+53,"refactor: update assessor table","Did some refactoring that addressed these issues:
+
+- Rewrote the code to match the WeMake style guide.
+- The functions are faster, and the memory is more efficient by ten times on average.
+- The functions are pure now, with no added consequence to the DataFrames or Series passed into them.
+All tests are still passing.","AlbertUlysses","closed",,,"","",""
+52,"docs: re-wrote some of the normalize_address information","Docs improvement for normailze_address","AlbertUlysses","closed",,,"","",""
+51,"refactored transformation scripts","refactored a lot and added extra notes in read me ","AlbertUlysses","closed",,,"","",""
+50,"tot update/completion","ToT table is done. Refactored it a bit - still passing test
+","AlbertUlysses","closed",,,"","",""
+49,"work on tot table","WIP - started work on TOT table and tests I will finish in tomorrow and start on a new table.","AlbertUlysses","closed",,,"","",""
+48,"Add expemtion table transformations","I added the transformation functions for exempt table with their tests, renamed the main folder.","AlbertUlysses","closed",,,"","",""
+47,"updating folder layout and adding transformation scripts with tests","introduces the first of a few scripts to the project along with some tests. Needs refactoring but all tests are passing.
+
+","AlbertUlysses","closed",,,"","",""
+42,"Update Pedestrian Data folder","Pedestrian Data folder including the relevant notebooks and visualizations.","dplem","closed",,,"","",""
+41,"Push request redo preprocessing files","","KarinaLopez19","closed",,,"","","AlbertUlysses"
+40,"Extend Mapillary API wrapper","These are new features for the python wrapper for Mapillary's API to allow it to:
+
+- Make requests of other layers of Mapillary data beyond traffic signs
+
+- Store photo image IDs associated with each item on map
+
+- Parse Mapillary's latitude/longitude string format
+
+- Give feedback while downloading data and timeout after 5 minutes, to prevent downloads from failing silently","henrykaplan","closed",,,"","henrykaplan","dplem,AlbertUlysses"
+39,"add bbox function","Created bbox function to help create a boundary box for cleaning data. Added one test to ensure any refactoring in the future still returns the correct expected results. ","AlbertUlysses","closed",,,"","",""
+38,"this is a test","","KarinaLopez19","closed",,,"","",""
+37,"add new folder with new functions to help with clean up","Adding the folder for airbnb listing and first function for helper functions that will be used for cleaning the data
+","AlbertUlysses","closed",,,"","",""
+35,"clean up jupyter notebook","cleaned up the jupyter notebook to make it more user friendly","AlbertUlysses","closed",,,"","",""
+34,"chore: clean up modules","used black to clean up modules and removed commented code that wasn't being used.","AlbertUlysses","closed",,,"","",""
diff --git a/pull_requests.csv b/pull_requests.csv
new file mode 100644
index 0000000..7f4e5e3
--- /dev/null
+++ b/pull_requests.csv
@@ -0,0 +1,1326 @@
+PR Number,Title,Description,Author,State,Number of Commits,Number of Files Changed,Labels,Assignees,Reviewers,Linked Issues,Has Issues
+208,"Adding Cleaning script version3",,"mru-hub","open",,,"","","salice",", , 208","false"
+205,"Added data loading and cleaning Jupyter notebook","- Initial implementation of data loading/ saving and cleaning.
+- Improvements planned for future versions.","mru-hub","closed",,,"","","",", , 205","false"
+201,"feat: City of Los Angeles Evictions #179","This commit introduces basic EDA on the LA evictions data, It introduces explode/splitting of Just Cause column for effective analysis
+
+starting #179 ","rahul897","open",,,"","","",", , 201","false"
+199,"177 create 311 data csv files that can be accessed through a jupyter notebook","Cleaning rules document from the 311-data","mru-hub","closed",,,"","","salice",", , 199","false"
+197,"N2020h issues to csv",,"n2020h","open",,,"","n2020h","",", , 197","false"
+196,"186 Adding Crime Data",,"dolla24","closed",,,"","dolla24","",", , 196","false"
+195,"fix name of image to work with Windows",,"salice","closed",,,"","","",", , 195","false"
+192,"Create issues-to-csv.yml","testing feature branch with draft pull request","n2020h","closed",,,"","","",", , 192","false"
+188,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Partially fixes https://github.com/hackforla/website/issues/6139
+
+### What changes did you make?
+- Adding Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo (commit: https://github.com/hackforla/access-the-data/commit/62609601277509b0473bbf8f6dbf1ef735430c84) to this repo
+
+### Why did you make the changes?
+- Requested by Bonnie per [this comment](https://github.com/hackforla/website/issues/5127#issuecomment-1776306588)
+- AJ Price, Access the Data's PM, didn't want Jupyter Notebook to show up as a Language for the project, so Bonnie recommended moving the Workshop-Feedback_Bigram_Analysis.ipynb file to the Data Science repo.
+
+### Additional Notes
+- I recommend using rebase merging to preserve who authored the file.
+- To see examples of the commit history of the various methods of merging, see these branches in my forked repo:
+ - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended
+ - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits
+ - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","","5127, 2607, 6139, 296, , , 188","false"
+187,"Moving Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo","Fixes N/A -
+
+### What changes did you make?
+- Adding Workshop-Feedback_Bigram_Analysis.ipynb from access-the-data repo (commit: https://github.com/hackforla/access-the-data/commit/62609601277509b0473bbf8f6dbf1ef735430c84) to this repo
+
+### Why did you make the changes?
+- Requested by Bonnie per [this comment](https://github.com/hackforla/website/issues/5127#issuecomment-1776306588)
+- AJ Price, Access the Data's PM, didn't want Jupyter Notebook to show up as a Language for the project, so Bonnie recommended moving the Workshop-Feedback_Bigram_Analysis.ipynb file to the Data Science repo.
+
+### Additional Notes
+- I recommend using rebase merging to preserve who authored the file.
+- To see examples of the commit history of the various methods of merging, see these branches in my forked repo:
+ - [Using rebase & merge](https://github.com/JessicaLucindaCheng/data-science/commits/rebase-merge): Recommended
+ - [Using merge commit](https://github.com/JessicaLucindaCheng/data-science/commits/merge-commit): Okay but results in 2 commits
+ - [Using squash & merge](https://github.com/JessicaLucindaCheng/data-science/commits/squash-merge): Not recommended because it doesn't preserve author's credit","JessicaLucindaCheng","closed",,,"","","",", , 187","false"
+186,"Lalla la crime analysis 2",,"Lalla22","open",,,"","","salice",", , 186","false"
+185,"Create README.md",,"Lalla22","closed",,,"","","",", , 185","false"
+184,"git c",,"Lalla22","closed",,,"","","",", , 184","false"
+176,"build(deps): bump cryptography from 3.3.2 to 39.0.1 in /311-data/mapillarywrapper","Bumps [cryptography](https://github.com/pyca/cryptography) from 3.3.2 to 39.0.1.
+
+Changelog
+Sourced from cryptography's changelog.
+
+39.0.1 - 2023-02-07
+
+* **SECURITY ISSUE** - Fixed a bug where ``Cipher.update_into`` accepted Python
+ buffer protocol objects, but allowed immutable buffers. **CVE-2023-23931**
+* Updated Windows, macOS, and Linux wheels to be compiled with OpenSSL 3.0.8.
+.. _v39-0-0:
+39.0.0 - 2023-01-01
+
+
+- BACKWARDS INCOMPATIBLE: Support for OpenSSL 1.1.0 has been removed.
+Users on older version of OpenSSL will need to upgrade.
+- BACKWARDS INCOMPATIBLE: Dropped support for LibreSSL < 3.5. The new
+minimum LibreSSL version is 3.5.0. Going forward our policy is to support
+versions of LibreSSL that are available in versions of OpenBSD that are
+still receiving security support.
+- BACKWARDS INCOMPATIBLE: Removed the
encode_point and
+from_encoded_point methods on
+:class:~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicNumbers,
+which had been deprecated for several years.
+:meth:~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey.public_bytes
+and
+:meth:~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey.from_encoded_point
+should be used instead.
+- BACKWARDS INCOMPATIBLE: Support for using MD5 or SHA1 in
+:class:
~cryptography.x509.CertificateBuilder, other X.509 builders, and
+PKCS7 has been removed.
+- BACKWARDS INCOMPATIBLE: Dropped support for macOS 10.10 and 10.11, macOS
+users must upgrade to 10.12 or newer.
+- ANNOUNCEMENT: The next version of
cryptography (40.0) will change
+the way we link OpenSSL. This will only impact users who build
+cryptography from source (i.e., not from a wheel), and specify their
+own version of OpenSSL. For those users, the CFLAGS, LDFLAGS,
+INCLUDE, LIB, and CRYPTOGRAPHY_SUPPRESS_LINK_FLAGS environment
+variables will no longer be respected. Instead, users will need to
+configure their builds as documented here_.
+- Added support for
+:ref:
disabling the legacy provider in OpenSSL 3.0.x<legacy-provider>.
+- Added support for disabling RSA key validation checks when loading RSA
+keys via
+:func:
~cryptography.hazmat.primitives.serialization.load_pem_private_key,
+:func:~cryptography.hazmat.primitives.serialization.load_der_private_key,
+and
+:meth:~cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateNumbers.private_key.
+This speeds up key loading but is :term:unsafe if you are loading potentially
+attacker supplied keys.
+- Significantly improved performance for
+:class:
~cryptography.hazmat.primitives.ciphers.aead.ChaCha20Poly1305
+
+
+
+... (truncated)
+
+
+Commits
+
+
+
+
+
+[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
+
+Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
+
+[//]: # (dependabot-automerge-start)
+[//]: # (dependabot-automerge-end)
+
+---
+
+
+Dependabot commands and options
+
+
+You can trigger Dependabot actions by commenting on this PR:
+- `@dependabot rebase` will rebase this PR
+- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
+- `@dependabot merge` will merge this PR after your CI passes on it
+- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
+- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
+- `@dependabot reopen` will reopen this PR if it is closed
+- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
+- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language
+- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language
+- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language
+- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language
+
+You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts).
+
+ ","dependabot[bot]","open",,,"dependencies","","",", , 176","false"
+175,"build(deps): bump certifi from 2021.5.30 to 2022.12.7 in /LAANE","Bumps [certifi](https://github.com/certifi/python-certifi) from 2021.5.30 to 2022.12.7.
+
+Commits
+
+
+
+
+
+[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
+
+Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
+
+[//]: # (dependabot-automerge-start)
+[//]: # (dependabot-automerge-end)
+
+---
+
+
+Dependabot commands and options
+
+
+You can trigger Dependabot actions by commenting on this PR:
+- `@dependabot rebase` will rebase this PR
+- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
+- `@dependabot merge` will merge this PR after your CI passes on it
+- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
+- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
+- `@dependabot reopen` will reopen this PR if it is closed
+- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
+- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language
+- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language
+- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language
+- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language
+
+You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts).
+
+ ","dependabot[bot]","open",,,"dependencies","","",", , 175","false"
+174,"build(deps): bump certifi from 2020.6.20 to 2022.12.7 in /311-data/mapillarywrapper","Bumps [certifi](https://github.com/certifi/python-certifi) from 2020.6.20 to 2022.12.7.
+
+Commits
+
+
+
+
+
+[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
+
+Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
+
+[//]: # (dependabot-automerge-start)
+[//]: # (dependabot-automerge-end)
+
+---
+
+
+Dependabot commands and options
+
+
+You can trigger Dependabot actions by commenting on this PR:
+- `@dependabot rebase` will rebase this PR
+- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
+- `@dependabot merge` will merge this PR after your CI passes on it
+- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
+- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
+- `@dependabot reopen` will reopen this PR if it is closed
+- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
+- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language
+- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language
+- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language
+- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language
+
+You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts).
+
+ ","dependabot[bot]","open",,,"dependencies","","",", , 174","false"
+173,"build(deps): bump nbconvert from 5.6.1 to 6.5.1 in /311-data/mapillarywrapper","Bumps [nbconvert](https://github.com/jupyter/nbconvert) from 5.6.1 to 6.5.1.
+
+Release notes
+Sourced from nbconvert's releases.
+
+Release 6.5.1
+No release notes provided.
+6.5.0
+What's Changed
+
+New Contributors
+
+Full Changelog: https://github.com/jupyter/nbconvert/compare/6.4.5...6.5
+6.4.3
+What's Changed
+
+New Contributors
+
+Full Changelog: https://github.com/jupyter/nbconvert/compare/6.4.2...6.4.3
+6.4.0
+What's Changed
+
+New Contributors
+
+
+... (truncated)
+
+
+Commits
+
+
+
+
+
+[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
+
+Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
+
+[//]: # (dependabot-automerge-start)
+[//]: # (dependabot-automerge-end)
+
+---
+
+
+Dependabot commands and options
+
+
+You can trigger Dependabot actions by commenting on this PR:
+- `@dependabot rebase` will rebase this PR
+- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
+- `@dependabot merge` will merge this PR after your CI passes on it
+- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
+- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
+- `@dependabot reopen` will reopen this PR if it is closed
+- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
+- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language
+- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language
+- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language
+- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language
+
+You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts).
+
+ ","dependabot[bot]","open",,,"dependencies","","","172, , , 173","false"
+172,"build(deps): bump nbconvert from 5.6.1 to 6.3.0 in /311-data/mapillarywrapper","Bumps [nbconvert](https://github.com/jupyter/nbconvert) from 5.6.1 to 6.3.0.
+
+Commits
+
+
+
+
+
+[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
+
+Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
+
+[//]: # (dependabot-automerge-start)
+[//]: # (dependabot-automerge-end)
+
+---
+
+
+Dependabot commands and options
+
+
+You can trigger Dependabot actions by commenting on this PR:
+- `@dependabot rebase` will rebase this PR
+- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
+- `@dependabot merge` will merge this PR after your CI passes on it
+- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
+- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
+- `@dependabot reopen` will reopen this PR if it is closed
+- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
+- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language
+- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language
+- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language
+- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language
+
+You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts).
+
+ ","dependabot[bot]","closed",,,"dependencies","","",", , 172","false"
+171,"build(deps): bump mistune from 0.8.4 to 2.0.3 in /311-data/mapillarywrapper","Bumps [mistune](https://github.com/lepture/mistune) from 0.8.4 to 2.0.3.
+
+Release notes
+Sourced from mistune's releases.
+
+Version 2.0.2
+Fix escape_url via lepture/mistune#295
+Version 2.0.1
+Fix XSS for image link syntax.
+Version 2.0.0
+First release of Mistune v2.
+Version 2.0.0 RC1
+In this release, we have a Security Fix for harmful links.
+Version 2.0.0 Alpha 1
+This is the first release of v2. An alpha version for users to have a preview of the new mistune.
+
+
+
+Changelog
+Sourced from mistune's changelog.
+
+Changelog
+Here is the full history of mistune v2.
+Version 2.0.4
+
+Released on Jul 15, 2022
+
+- Fix
url plugin in <a> tag
+- Fix
* formatting
+
+Version 2.0.3
+
+Released on Jun 27, 2022
+
+- Fix
table plugin
+- Security fix for CVE-2022-34749
+
+Version 2.0.2
+
+Released on Jan 14, 2022
+Fix escape_url
+Version 2.0.1
+
+Released on Dec 30, 2021
+XSS fix for image link syntax.
+Version 2.0.0
+
+Released on Dec 5, 2021
+This is the first non-alpha release of mistune v2.
+Version 2.0.0rc1
+
+Released on Feb 16, 2021
+Version 2.0.0a6
+
+</tr></table>
+
+
+... (truncated)
+
+
+Commits
+
+3f422f1 Version bump 2.0.3
+a6d4321 Fix asteris emphasis regex CVE-2022-34749
+5638e46 Merge pull request #307 from jieter/patch-1
+0eba471 Fix typo in guide.rst
+61e9337 Fix table plugin
+76dec68 Add documentation for renderer heading when TOC enabled
+799cd11 Version bump 2.0.2
+babb0cf Merge pull request #295 from dairiki/bug.escape_url
+fc2cd53 Make mistune.util.escape_url less aggressive
+3e8d352 Version bump 2.0.1
+- Additional commits viewable in compare view
+
+
+
+
+
+[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
+
+Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
+
+[//]: # (dependabot-automerge-start)
+[//]: # (dependabot-automerge-end)
+
+---
+
+
+Dependabot commands and options
+
+
+You can trigger Dependabot actions by commenting on this PR:
+- `@dependabot rebase` will rebase this PR
+- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
+- `@dependabot merge` will merge this PR after your CI passes on it
+- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
+- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
+- `@dependabot reopen` will reopen this PR if it is closed
+- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
+- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language
+- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language
+- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language
+- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language
+
+You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts).
+
+ ","dependabot[bot]","open",,,"dependencies","","",", , 171","false"
+168,"Updating label analysis jupyter notebook with co-occurrence analysis",,"rbianchetti","open",,,"","","",", , 168","false"
+167,"Merge pull request #164 from hackforla/160-survey-repo-labels","A python script that gets all the labels used in open issues across a…","codemamma","open",,,"","","",", , 167","false"
+166,"build(deps): bump numpy from 1.21.2 to 1.22.0 in /LAANE","Bumps [numpy](https://github.com/numpy/numpy) from 1.21.2 to 1.22.0.
+
+Release notes
+Sourced from numpy's releases.
+
+v1.22.0
+NumPy 1.22.0 Release Notes
+NumPy 1.22.0 is a big release featuring the work of 153 contributors
+spread over 609 pull requests. There have been many improvements,
+highlights are:
+
+- Annotations of the main namespace are essentially complete. Upstream
+is a moving target, so there will likely be further improvements,
+but the major work is done. This is probably the most user visible
+enhancement in this release.
+- A preliminary version of the proposed Array-API is provided. This is
+a step in creating a standard collection of functions that can be
+used across application such as CuPy and JAX.
+- NumPy now has a DLPack backend. DLPack provides a common interchange
+format for array (tensor) data.
+- New methods for
quantile, percentile, and related functions. The
+new methods provide a complete set of the methods commonly found in
+the literature.
+- A new configurable allocator for use by downstream projects.
+
+These are in addition to the ongoing work to provide SIMD support for
+commonly used functions, improvements to F2PY, and better documentation.
+The Python versions supported in this release are 3.8-3.10, Python 3.7
+has been dropped. Note that 32 bit wheels are only provided for Python
+3.8 and 3.9 on Windows, all other wheels are 64 bits on account of
+Ubuntu, Fedora, and other Linux distributions dropping 32 bit support.
+All 64 bit wheels are also linked with 64 bit integer OpenBLAS, which should fix
+the occasional problems encountered by folks using truly huge arrays.
+Expired deprecations
+Deprecated numeric style dtype strings have been removed
+Using the strings "Bytes0", "Datetime64", "Str0", "Uint32",
+and "Uint64" as a dtype will now raise a TypeError.
+(gh-19539)
+Expired deprecations for loads, ndfromtxt, and mafromtxt in npyio
+numpy.loads was deprecated in v1.15, with the recommendation that
+users use pickle.loads instead. ndfromtxt and mafromtxt were both
+deprecated in v1.17 - users should use numpy.genfromtxt instead with
+the appropriate value for the usemask parameter.
+(gh-19615)
+
+
+... (truncated)
+
+
+Commits
+
+
+
+
+
+[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
+
+Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
+
+[//]: # (dependabot-automerge-start)
+[//]: # (dependabot-automerge-end)
+
+---
+
+
+Dependabot commands and options
+
+
+You can trigger Dependabot actions by commenting on this PR:
+- `@dependabot rebase` will rebase this PR
+- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
+- `@dependabot merge` will merge this PR after your CI passes on it
+- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
+- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
+- `@dependabot reopen` will reopen this PR if it is closed
+- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
+- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language
+- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language
+- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language
+- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language
+
+You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts).
+
+ ","dependabot[bot]","open",,,"dependencies","","",", , 166","false"
+165,"build(deps): bump numpy from 1.18.5 to 1.22.0 in /311-data/mapillarywrapper","Bumps [numpy](https://github.com/numpy/numpy) from 1.18.5 to 1.22.0.
+
+Release notes
+Sourced from numpy's releases.
+
+v1.22.0
+NumPy 1.22.0 Release Notes
+NumPy 1.22.0 is a big release featuring the work of 153 contributors
+spread over 609 pull requests. There have been many improvements,
+highlights are:
+
+- Annotations of the main namespace are essentially complete. Upstream
+is a moving target, so there will likely be further improvements,
+but the major work is done. This is probably the most user visible
+enhancement in this release.
+- A preliminary version of the proposed Array-API is provided. This is
+a step in creating a standard collection of functions that can be
+used across application such as CuPy and JAX.
+- NumPy now has a DLPack backend. DLPack provides a common interchange
+format for array (tensor) data.
+- New methods for
quantile, percentile, and related functions. The
+new methods provide a complete set of the methods commonly found in
+the literature.
+- A new configurable allocator for use by downstream projects.
+
+These are in addition to the ongoing work to provide SIMD support for
+commonly used functions, improvements to F2PY, and better documentation.
+The Python versions supported in this release are 3.8-3.10, Python 3.7
+has been dropped. Note that 32 bit wheels are only provided for Python
+3.8 and 3.9 on Windows, all other wheels are 64 bits on account of
+Ubuntu, Fedora, and other Linux distributions dropping 32 bit support.
+All 64 bit wheels are also linked with 64 bit integer OpenBLAS, which should fix
+the occasional problems encountered by folks using truly huge arrays.
+Expired deprecations
+Deprecated numeric style dtype strings have been removed
+Using the strings "Bytes0", "Datetime64", "Str0", "Uint32",
+and "Uint64" as a dtype will now raise a TypeError.
+(gh-19539)
+Expired deprecations for loads, ndfromtxt, and mafromtxt in npyio
+numpy.loads was deprecated in v1.15, with the recommendation that
+users use pickle.loads instead. ndfromtxt and mafromtxt were both
+deprecated in v1.17 - users should use numpy.genfromtxt instead with
+the appropriate value for the usemask parameter.
+(gh-19615)
+
+
+... (truncated)
+
+
+Commits
+
+
+
+
+
+[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
+
+Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
+
+[//]: # (dependabot-automerge-start)
+[//]: # (dependabot-automerge-end)
+
+---
+
+
+Dependabot commands and options
+
+
+You can trigger Dependabot actions by commenting on this PR:
+- `@dependabot rebase` will rebase this PR
+- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
+- `@dependabot merge` will merge this PR after your CI passes on it
+- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
+- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
+- `@dependabot reopen` will reopen this PR if it is closed
+- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
+- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language
+- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language
+- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language
+- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language
+
+You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts).
+
+ ","dependabot[bot]","open",,,"dependencies","","","142, , , 165","false"
+164,"A python script that gets all the labels used in open issues across a…","…n organization(s) via GitHub API","rbianchetti","closed",,,"","","","26, , , 164","false"
+159,"Linking latest pedestrian safety report presentation","This replaces a previous powerpoint presentation stored in the repo with links to the current report and presentation saved in Hack for LA's shared google drive.","henrykaplan","open",,,"","","akhaleghi",", , 159","false"
+158,"Data Science logo links and image files","The latest version of the Hack for LA data science logo is being worked on in Google Drive. I've updated the directory in the git repo with links to the latest and with png and svg versions of the latest logo files.","henrykaplan","closed",,,"","","akhaleghi","140, , , 158","false"
+142,"build(deps): bump numpy from 1.18.5 to 1.21.0 in /311-data/mapillarywrapper","Bumps [numpy](https://github.com/numpy/numpy) from 1.18.5 to 1.21.0.
+
+Release notes
+Sourced from numpy's releases.
+
+v1.21.0
+NumPy 1.21.0 Release Notes
+The NumPy 1.21.0 release highlights are
+
+- continued SIMD work covering more functions and platforms,
+- initial work on the new dtype infrastructure and casting,
+- universal2 wheels for Python 3.8 and Python 3.9 on Mac,
+- improved documentation,
+- improved annotations,
+- new
PCG64DXSM bitgenerator for random numbers.
+
+In addition there are the usual large number of bug fixes and other
+improvements.
+The Python versions supported for this release are 3.7-3.9. Official
+support for Python 3.10 will be added when it is released.
+:warning: Warning: there are unresolved problems compiling NumPy 1.21.0 with gcc-11.1 .
+
+- Optimization level
-O3 results in many wrong warnings when running the tests.
+- On some hardware NumPy will hang in an infinite loop.
+
+New functions
+Add PCG64DXSM BitGenerator
+Uses of the PCG64 BitGenerator in a massively-parallel context have
+been shown to have statistical weaknesses that were not apparent at the
+first release in numpy 1.17. Most users will never observe this weakness
+and are safe to continue to use PCG64. We have introduced a new
+PCG64DXSM BitGenerator that will eventually become the new default
+BitGenerator implementation used by default_rng in future releases.
+PCG64DXSM solves the statistical weakness while preserving the
+performance and the features of PCG64.
+See upgrading-pcg64 for more details.
+(gh-18906)
+Expired deprecations
+
+- The
shape argument numpy.unravel_index cannot be
+passed as dims keyword argument anymore. (Was deprecated in NumPy
+1.16.)
+
+
+
+... (truncated)
+
+
+Commits
+
+b235f9e Merge pull request #19283 from charris/prepare-1.21.0-release
+34aebc2 MAINT: Update 1.21.0-notes.rst
+493b64b MAINT: Update 1.21.0-changelog.rst
+07d7e72 MAINT: Remove accidentally created directory.
+032fca5 Merge pull request #19280 from charris/backport-19277
+7d25b81 BUG: Fix refcount leak in ResultType
+fa5754e BUG: Add missing DECREF in new path
+61127bb Merge pull request #19268 from charris/backport-19264
+143d45f Merge pull request #19269 from charris/backport-19228
+d80e473 BUG: Removed typing for == and != in dtypes
+- Additional commits viewable in compare view
+
+
+
+
+
+[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
+
+Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
+
+[//]: # (dependabot-automerge-start)
+[//]: # (dependabot-automerge-end)
+
+---
+
+
+Dependabot commands and options
+
+
+You can trigger Dependabot actions by commenting on this PR:
+- `@dependabot rebase` will rebase this PR
+- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
+- `@dependabot merge` will merge this PR after your CI passes on it
+- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
+- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
+- `@dependabot reopen` will reopen this PR if it is closed
+- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
+- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language
+- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language
+- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language
+- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language
+
+You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts).
+
+ ","dependabot[bot]","closed",,,"dependencies","","",", , 142","false"
+139,"Removed spaces from all filenames in pedestrian data directory","Changed filenames of all output files and graphics. No changes to file contents — these are final graphics and reports, and aren't aren't referenced by file path anywhere.","henrykaplan","closed",,,"","","",", , 139","false"
+132,"Pedestrian safety","Added readme documentation for downloading and organizing data, running Jupyter notebooks, and reorganized analysis files.","henrykaplan","closed",,,"","","",", , 132","false"
+123,"Create a new issue template for epics",,"akhaleghi","closed",,,"","","","120, , , 123","false"
+122,"Pedestrian safety draft visualizations and presentation","Further simplified and clarified maps and traffic accident charts, added these new charts and related discussion to the presentation file.","henrykaplan","closed",,,"","","AlbertUlysses",", , 122","false"
+117,"Pedestrian safety","- Add optional parameter to mapillary wrapper client to allow data requests of recent data only, to quickly update existing datasets.
+- Better maps and visualizations.
+- Use DBScan clustering algorithm to find clusters of traffic accidents (simplest way to roughly group accidents at specific intersections, as the accident data is mostly based around intersections).
+- Updated report ppt for August 5 presentation to data science group","henrykaplan","closed",,,"","","",", , 117","false"
+116,"webscraping folder initial commit","[Edited]
+
+This PR is a stand alone folder added to the 311-data folder. It contains a webscraping script for the tech stacks of each NC survey. See issue [44](https://github.com/hackforla/data-science/issues/44). The readme contains a link to the google sheets table of the tech used for each website.","rajindermavi","closed",,,"","","",", , 116","false"
+115,"docs","updated readme to include sample queries that LAANE is interested in doing against the database","AlbertUlysses","closed",,,"","","",", , 115","false"
+114,"docs","Update README/todos","AlbertUlysses","closed",,,"","","",", , 114","false"
+113,"docs","Updated misc docs","AlbertUlysses","closed",,,"","","",", , 113","false"
+112,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /LAANE","Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.24.3 to 1.26.5.
+
+Release notes
+Sourced from urllib3's releases.
+
+1.26.5
+:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap
+
+- Fixed deprecation warnings emitted in Python 3.10.
+- Updated vendored
six library to 1.16.0.
+- Improved performance of URL parser when splitting the authority component.
+
+If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors
+1.26.4
+:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap
+
+- Changed behavior of the default
SSLContext when connecting to HTTPS proxy during HTTPS requests. The default SSLContext now sets check_hostname=True.
+
+If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors
+1.26.3
+:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap
+
+If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors
+1.26.2
+:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap
+
+- Fixed an issue where
wrap_socket and CERT_REQUIRED wouldn't be imported properly on Python 2.7.8 and earlier (Pull #2052)
+
+1.26.1
+:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap
+
+- Fixed an issue where two
User-Agent headers would be sent if a User-Agent header key is passed as bytes (Pull #2047)
+
+1.26.0
+:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap
+
+-
+
Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)
+
+-
+
Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that
+still wish to use TLS earlier than 1.2 without a deprecation warning
+should opt-in explicitly by setting ssl_version=ssl.PROTOCOL_TLSv1_1 (Pull #2002)
+Starting in urllib3 v2.0: Connections that receive a DeprecationWarning will fail
+
+-
+
Deprecated Retry options Retry.DEFAULT_METHOD_WHITELIST, Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST
+and Retry(method_whitelist=...) in favor of Retry.DEFAULT_ALLOWED_METHODS,
+Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT, and Retry(allowed_methods=...)
+(Pull #2000) Starting in urllib3 v2.0: Deprecated options will be removed
+
+
+
+
+... (truncated)
+
+
+Changelog
+Sourced from urllib3's changelog.
+
+1.26.5 (2021-05-26)
+
+- Fixed deprecation warnings emitted in Python 3.10.
+- Updated vendored
six library to 1.16.0.
+- Improved performance of URL parser when splitting
+the authority component.
+
+1.26.4 (2021-03-15)
+
+- Changed behavior of the default
SSLContext when connecting to HTTPS proxy
+during HTTPS requests. The default SSLContext now sets check_hostname=True.
+
+1.26.3 (2021-01-26)
+
+1.26.2 (2020-11-12)
+
+- Fixed an issue where
wrap_socket and CERT_REQUIRED wouldn't
+be imported properly on Python 2.7.8 and earlier (Pull #2052)
+
+1.26.1 (2020-11-11)
+
+- Fixed an issue where two
User-Agent headers would be sent if a
+User-Agent header key is passed as bytes (Pull #2047)
+
+1.26.0 (2020-11-10)
+
+-
+
NOTE: urllib3 v2.0 will drop support for Python 2.
+Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>_.
+
+-
+
Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)
+
+-
+
Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that
+still wish to use TLS earlier than 1.2 without a deprecation warning
+
+
+
+
+... (truncated)
+
+
+Commits
+
+d161647 Release 1.26.5
+2d4a3fe Improve performance of sub-authority splitting in URL
+2698537 Update vendored six to 1.16.0
+07bed79 Fix deprecation warnings for Python 3.10 ssl module
+d725a9b Add Python 3.10 to GitHub Actions
+339ad34 Use pytest==6.2.4 on Python 3.10+
+f271c9c Apply latest Black formatting
+1884878 [1.26] Properly proxy EOF on the SSLTransport test suite
+a891304 Release 1.26.4
+8d65ea1 Merge pull request from GHSA-5phf-pp7p-vc2r
+- Additional commits viewable in compare view
+
+
+
+
+
+[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
+
+Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
+
+[//]: # (dependabot-automerge-start)
+[//]: # (dependabot-automerge-end)
+
+---
+
+
+Dependabot commands and options
+
+
+You can trigger Dependabot actions by commenting on this PR:
+- `@dependabot rebase` will rebase this PR
+- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
+- `@dependabot merge` will merge this PR after your CI passes on it
+- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
+- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
+- `@dependabot reopen` will reopen this PR if it is closed
+- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
+- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language
+- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language
+- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language
+- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language
+
+You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts).
+
+ ","dependabot[bot]","closed",,,"dependencies","","",", , 112","false"
+111,"Docs!","Updated docs like readme and add extra notes to some other files","AlbertUlysses","closed",,,"","","",", , 111","false"
+105,"debug","debugged some code that was entering incorrect addresses","AlbertUlysses","closed",,,"","","",", , 105","false"
+104,"docs/clean up","updated README and removed some old files.","AlbertUlysses","closed",,,"","","",", , 104","false"
+103,"feat: assessor script","Last script, passes tests refactored a bit","AlbertUlysses","closed",,,"","","",", , 103","false"
+102,"docs","Update Readme
+removed a lot of personal notes aimed for me during script writing","AlbertUlysses","closed",,,"","","",", , 102","false"
+101,"debug categorically inelligible","debug categorically inelligibl data","AlbertUlysses","closed",,,"","","",", , 101","false"
+100,"New scripts + debug","added some code that debugs the ofs and luxly datasets.
+Added a new script that handles warnings and citations.","AlbertUlysses","closed",,,"","","",", , 100","false"
+99,"bugfix","fixed bugs across 4 files that dealth with addresses that returned 0 for zipcode instead of a 5 number digit","AlbertUlysses","closed",,,"","","",", , 99","false"
+98,"feat: new script for warning","This script is for warning files that don't have only addresses and dates.","AlbertUlysses","closed",,,"","","",", , 98","false"
+97,"Two commits","First commit fixes the warning script file.
+The second commit adds an extra line describe what the script is for.
+","AlbertUlysses","closed",,,"","","",", , 97","false"
+96,"feat: new processesing script","script for airbnb reviews is complete.","AlbertUlysses","closed",,,"","","",", , 96","false"
+95,"feat: airbnb dataset script","airbnb script that uploads listings and host information ","AlbertUlysses","closed",,,"","","",", , 95","false"
+93,"debug one fine stay scripts","debug one fine stay scripts for address2 errors","AlbertUlysses","closed",,,"","","",", , 93","false"
+92,"debug","debugged the data from one fine stay.","AlbertUlysses","closed",,,"","","",", , 92","false"
+91,"debug","Did some debugging for luxly platform script","AlbertUlysses","closed",,,"","","",", , 91","false"
+90,"fixbug in hsodenials","hsodenials bugs are remved, hsodenials script is good to go.","AlbertUlysses","closed",,,"","","",", , 90","false"
+89,"debug","Debugged the hso_registrant enteries.","AlbertUlysses","closed",,,"","","",", , 89","false"
+88,"bugfix: fixed exempt script","fixed exempt table to match the rest of the database","AlbertUlysses","closed",,,"","","",", , 88","false"
+87,"fixbug: fixed bug in categorically inelligible","fixed categoically ineligible bug","AlbertUlysses","closed",,,"","","",", , 87","false"
+86,"bug fix: fixed bug for complaints data","fixed complaints data to not have nulls and to not allow any state that don't use two letter abbreviation. ","AlbertUlysses","closed",,,"","","",", , 86","false"
+85,"refactor/debug: debugging each script","Refactored tot script to make sure it is uploading to sqlite correctly.","AlbertUlysses","closed",,,"","","",", , 85","false"
+84,"re-refactor normalize address","normalize address wrapper returns none as default again because that's how the library handle it, shouldn't return two different options.","AlbertUlysses","closed",,,"","","",", , 84","false"
+83,"refactored","refactoring some transformation files to fix bug","AlbertUlysses","closed",,,"","","",", , 83","false"
+82,"feat: hso_registrant script","HSO_Registrant script is complete.
+","AlbertUlysses","closed",,,"","","",", , 82","false"
+81,"refactor: no code change files moved","I moved some files around to better reflect where they belong.
+For exapmle, assessor table file will be move to processingscripts folder because the code is unique to the assessor dataset.
+However, normalize_address_wrapper will stay in the transformations folder because multiple scripts use this.
+Asssesor file (and builds) will now have the custom code and later include the processing code as well.","AlbertUlysses","closed",,,"","","",", , 81","false"
+79,"refactor: add a new file to handle multiple files","Remove old code that does multiple files for a job and include a new module that handles that.","AlbertUlysses","closed",,,"","","",", , 79","false"
+78,"feat: new script","Script that enters one fine stay data into platform table but has a different column layout then the other one fine stay sheets.","AlbertUlysses","closed",,,"","","",", , 78","false"
+77,"refactor: include steps for entire folder","Added some new code that helps with multiple fies in a folder. This should be moved into it's own module in the future.","AlbertUlysses","closed",,,"","","",", , 77","false"
+76,"refactor: move scripts to new folder","move all scripting files to processingscript folder and leave helper functions in the transformations folder.","AlbertUlysses","closed",,,"","","",", , 76","false"
+75,"feat: platform one fine stay","Adding the custom script for uploading one fine stay data into platform database.","AlbertUlysses","closed",,,"","","",", , 75","false"
+74,"refactor: refactor platform luxly","refactor the platform luxly file to make it more readable and closer allign with wemake style guide.","AlbertUlysses","closed",,,"","","",", , 74","false"
+73,"docs: update license","updated the license to reflect Hack For LA's best practices. ","AlbertUlysses","closed",,,"","","",", , 73","false"
+72,"feat: platform table luxly files","Completed script for loading luxly files into the platform database.","AlbertUlysses","closed",,,"","","",", , 72","false"
+71,"feat: add hso revoked table","The script for inserting HSO Revoked data is complete.","AlbertUlysses","closed",,,"","","",", , 71","false"
+70,"feat: exempt processing script","The processing script for the ""exempt"" dataset is done.","AlbertUlysses","closed",,,"","","",", , 70","false"
+69,"feat: noncompliant script","Add script that inserts noncompliant data into database.","AlbertUlysses","closed",,,"","","",", , 69","false"
+68,"feat: categorically inelligible","added a new script for the categorically ineligible dataset.","AlbertUlysses","closed",,,"","","",", , 68","false"
+67,"feat: add complaints script","Added a new script that inputs Complaints data into the database.
+","AlbertUlysses","closed",,,"","","",", , 67","false"
+64,"feat: tot insert file","New insert script complete.
+","AlbertUlysses","closed",,,"","","",", , 64","false"
+63,"feat: add new abstraction function","refactored some code and created a new abstraction function for inserting data. ","AlbertUlysses","closed",,,"","","",", , 63","false"
+62,"quick fix","removed absolute path : quick fix","AlbertUlysses","closed",,,"","","",", , 62","false"
+61,"feat: hso_denials insert function","Created a new file for hso_denial table, Added test for the custom functions. Added the code to insert the hso_denials data into the db.
+1st dataset that's completely done
+Need to go back to refactor the code because it's ugly","AlbertUlysses","closed",,,"","","",", , 61","false"
+60,"build(deps): bump jupyterlab from 2.1.5 to 2.2.10 in /311-data/mapillarywrapper","Bumps [jupyterlab](https://github.com/jupyterlab/jupyterlab) from 2.1.5 to 2.2.10.
+
+Commits
+
+87fff87 New version
+8b88bd1 update canvas version to allow build
+d5e3649 Merge pull request from GHSA-4952-p58q-6crx
+9a8dadf Publish 2.2.9
+51fe0db bump version
+e8e144b New version
+a67a68e Merge pull request #9211 from meeseeksmachine/auto-backport-of-pr-9189-on-2.2.x
+1c7d14e Merge pull request #9173 from datalayer-contrib/2-2-x/revert-perf
+b8c5203 Backport PR #9189: Update session and kernel manager data only if there was a...
+bbc2959 Merge pull request #9168 from karlaspuldaro/notebook-kernel-display-2.2.x
+- Additional commits viewable in compare view
+
+
+
+
+
+[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
+
+Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
+
+[//]: # (dependabot-automerge-start)
+[//]: # (dependabot-automerge-end)
+
+---
+
+
+Dependabot commands and options
+
+
+You can trigger Dependabot actions by commenting on this PR:
+- `@dependabot rebase` will rebase this PR
+- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
+- `@dependabot merge` will merge this PR after your CI passes on it
+- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
+- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
+- `@dependabot reopen` will reopen this PR if it is closed
+- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
+- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language
+- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language
+- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language
+- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language
+
+You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts).
+
+ ","dependabot[bot]","closed",,,"dependencies","","",", , 60","false"
+58,"docs: adding COPYING file","removed some old jupyter files and added a license file which is called COPYING per GNU's suggestion.","AlbertUlysses","closed",,,"","","",", , 58","false"
+57,"Feat: add Airbnb tables","added airbnb tables and updated SQL Alchemy relationships.","AlbertUlysses","closed",,,"","","",", , 57","false"
+56,"feat: add SQL Alchemy models/Database","added SQL Alchemy Models and the Database code. ","AlbertUlysses","closed",,,"","","",", , 56","false"
+55,"docs: rewrote some docstrings etc","updated some doc strings in files to better reflect the intent.","AlbertUlysses","closed",,,"","","",", , 55","false"
+54,"feat: new code for date entries","I added new code and tests for formatting date columns. ","AlbertUlysses","closed",,,"","","",", , 54","false"
+53,"refactor: update assessor table","Did some refactoring that addressed these issues:
+
+- Rewrote the code to match the WeMake style guide.
+- The functions are faster, and the memory is more efficient by ten times on average.
+- The functions are pure now, with no added consequence to the DataFrames or Series passed into them.
+All tests are still passing.","AlbertUlysses","closed",,,"","","",", , 53","false"
+52,"docs: re-wrote some of the normalize_address information","Docs improvement for normailze_address","AlbertUlysses","closed",,,"","","",", , 52","false"
+51,"refactored transformation scripts","refactored a lot and added extra notes in read me ","AlbertUlysses","closed",,,"","","",", , 51","false"
+50,"tot update/completion","ToT table is done. Refactored it a bit - still passing test
+","AlbertUlysses","closed",,,"","","",", , 50","false"
+49,"work on tot table","WIP - started work on TOT table and tests I will finish in tomorrow and start on a new table.","AlbertUlysses","closed",,,"","","",", , 49","false"
+48,"Add expemtion table transformations","I added the transformation functions for exempt table with their tests, renamed the main folder.","AlbertUlysses","closed",,,"","","",", , 48","false"
+47,"updating folder layout and adding transformation scripts with tests","introduces the first of a few scripts to the project along with some tests. Needs refactoring but all tests are passing.
+
+","AlbertUlysses","closed",,,"","","",", , 47","false"
+43,"build(deps): bump urllib3 from 1.24.3 to 1.26.5 in /311-data/mapillarywrapper","Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.24.3 to 1.26.5.
+
+Release notes
+Sourced from urllib3's releases.
+
+1.26.5
+:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap
+
+- Fixed deprecation warnings emitted in Python 3.10.
+- Updated vendored
six library to 1.16.0.
+- Improved performance of URL parser when splitting the authority component.
+
+If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors
+1.26.4
+:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap
+
+- Changed behavior of the default
SSLContext when connecting to HTTPS proxy during HTTPS requests. The default SSLContext now sets check_hostname=True.
+
+If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors
+1.26.3
+:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap
+
+If you or your organization rely on urllib3 consider supporting us via GitHub Sponsors
+1.26.2
+:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap
+
+- Fixed an issue where
wrap_socket and CERT_REQUIRED wouldn't be imported properly on Python 2.7.8 and earlier (Pull #2052)
+
+1.26.1
+:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap
+
+- Fixed an issue where two
User-Agent headers would be sent if a User-Agent header key is passed as bytes (Pull #2047)
+
+1.26.0
+:warning: IMPORTANT: urllib3 v2.0 will drop support for Python 2: Read more in the v2.0 Roadmap
+
+-
+
Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)
+
+-
+
Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that
+still wish to use TLS earlier than 1.2 without a deprecation warning
+should opt-in explicitly by setting ssl_version=ssl.PROTOCOL_TLSv1_1 (Pull #2002)
+Starting in urllib3 v2.0: Connections that receive a DeprecationWarning will fail
+
+-
+
Deprecated Retry options Retry.DEFAULT_METHOD_WHITELIST, Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST
+and Retry(method_whitelist=...) in favor of Retry.DEFAULT_ALLOWED_METHODS,
+Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT, and Retry(allowed_methods=...)
+(Pull #2000) Starting in urllib3 v2.0: Deprecated options will be removed
+
+
+
+
+... (truncated)
+
+
+Changelog
+Sourced from urllib3's changelog.
+
+1.26.5 (2021-05-26)
+
+- Fixed deprecation warnings emitted in Python 3.10.
+- Updated vendored
six library to 1.16.0.
+- Improved performance of URL parser when splitting
+the authority component.
+
+1.26.4 (2021-03-15)
+
+- Changed behavior of the default
SSLContext when connecting to HTTPS proxy
+during HTTPS requests. The default SSLContext now sets check_hostname=True.
+
+1.26.3 (2021-01-26)
+
+1.26.2 (2020-11-12)
+
+- Fixed an issue where
wrap_socket and CERT_REQUIRED wouldn't
+be imported properly on Python 2.7.8 and earlier (Pull #2052)
+
+1.26.1 (2020-11-11)
+
+- Fixed an issue where two
User-Agent headers would be sent if a
+User-Agent header key is passed as bytes (Pull #2047)
+
+1.26.0 (2020-11-10)
+
+-
+
NOTE: urllib3 v2.0 will drop support for Python 2.
+Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>_.
+
+-
+
Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)
+
+-
+
Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that
+still wish to use TLS earlier than 1.2 without a deprecation warning
+
+
+
+
+... (truncated)
+
+
+Commits
+
+d161647 Release 1.26.5
+2d4a3fe Improve performance of sub-authority splitting in URL
+2698537 Update vendored six to 1.16.0
+07bed79 Fix deprecation warnings for Python 3.10 ssl module
+d725a9b Add Python 3.10 to GitHub Actions
+339ad34 Use pytest==6.2.4 on Python 3.10+
+f271c9c Apply latest Black formatting
+1884878 [1.26] Properly proxy EOF on the SSLTransport test suite
+a891304 Release 1.26.4
+8d65ea1 Merge pull request from GHSA-5phf-pp7p-vc2r
+- Additional commits viewable in compare view
+
+
+
+
+
+[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
+
+Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
+
+[//]: # (dependabot-automerge-start)
+[//]: # (dependabot-automerge-end)
+
+---
+
+
+Dependabot commands and options
+
+
+You can trigger Dependabot actions by commenting on this PR:
+- `@dependabot rebase` will rebase this PR
+- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
+- `@dependabot merge` will merge this PR after your CI passes on it
+- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
+- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
+- `@dependabot reopen` will reopen this PR if it is closed
+- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
+- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
+- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language
+- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language
+- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language
+- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language
+
+You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/hackforla/data-science/network/alerts).
+
+ ","dependabot[bot]","closed",,,"dependencies","","",", , 43","false"
+42,"Update Pedestrian Data folder","Pedestrian Data folder including the relevant notebooks and visualizations.","dplem","closed",,,"","","",", , 42","false"
+41,"Push request redo preprocessing files","","KarinaLopez19","closed",,,"","","AlbertUlysses",", , 41","false"
+40,"Extend Mapillary API wrapper","These are new features for the python wrapper for Mapillary's API to allow it to:
+
+- Make requests of other layers of Mapillary data beyond traffic signs
+
+- Store photo image IDs associated with each item on map
+
+- Parse Mapillary's latitude/longitude string format
+
+- Give feedback while downloading data and timeout after 5 minutes, to prevent downloads from failing silently","henrykaplan","closed",,,"","henrykaplan","dplem,AlbertUlysses",", , 40","false"
+39,"add bbox function","Created bbox function to help create a boundary box for cleaning data. Added one test to ensure any refactoring in the future still returns the correct expected results. ","AlbertUlysses","closed",,,"","","",", , 39","false"
+38,"this is a test","","KarinaLopez19","closed",,,"","","",", , 38","false"
+37,"add new folder with new functions to help with clean up","Adding the folder for airbnb listing and first function for helper functions that will be used for cleaning the data
+","AlbertUlysses","closed",,,"","","",", , 37","false"
+35,"clean up jupyter notebook","cleaned up the jupyter notebook to make it more user friendly","AlbertUlysses","closed",,,"","","",", , 35","false"