diff --git a/.github/workflows/sync-license-data.yml b/.github/workflows/sync-license-data.yml index 3e6c56c7f..9e93cc315 100644 --- a/.github/workflows/sync-license-data.yml +++ b/.github/workflows/sync-license-data.yml @@ -51,7 +51,7 @@ jobs: - name: Create Pull Request if: steps.check-changes.outputs.changes == 'true' - uses: peter-evans/create-pull-request@v5 + uses: peter-evans/create-pull-request@v8 with: commit-message: | feat: update SPDX license data diff --git a/.github/workflows/sync-types.yml b/.github/workflows/sync-types.yml new file mode 100644 index 000000000..f60d471ba --- /dev/null +++ b/.github/workflows/sync-types.yml @@ -0,0 +1,61 @@ +name: Sync Deno & web types + +on: + schedule: + - cron: "0 6 1 * *" + workflow_dispatch: + +jobs: + sync-types: + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Install Deno + uses: denoland/setup-deno@v2 + + - name: Deno types + run: deno task tools:deno_symbols + + - name: Update MDN + run: deno update @mdn/browser-compat-data + + - name: Web types + run: deno task tools:web_symbols + + - name: Check for changes + id: check-changes + run: | + git add api/src/api/docs/deno_types.json api/src/api/docs/web_builtins.json + if git diff --cached --quiet; then + echo "changes=false" >> $GITHUB_OUTPUT + else + echo "changes=true" >> $GITHUB_OUTPUT + fi + + - name: Stage remaining files + run: git add deno.json deno.lock + + - name: Create Pull Request + if: steps.check-changes.outputs.changes == 'true' + uses: peter-evans/create-pull-request@v8 + with: + commit-message: | + chore: update Deno & web types + + Automated update of Deno types and web types. + title: "chore: update Deno & web types" + body: | + ## Summary + + This PR updates the Deno types with the latest CLI version, and the web types with the latest @mdn/browser-compat-data version. 
+ + ### Automation + This PR was created automatically by the monthly sync workflow. + branch: update-types + delete-branch: true diff --git a/api/.sqlx/query-92b0156e95d72192b949765c89601d8d7d0f99c1ab91184473d2afcd06c287e9.json b/api/.sqlx/query-92b0156e95d72192b949765c89601d8d7d0f99c1ab91184473d2afcd06c287e9.json new file mode 100644 index 000000000..850fc3721 --- /dev/null +++ b/api/.sqlx/query-92b0156e95d72192b949765c89601d8d7d0f99c1ab91184473d2afcd06c287e9.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM oauth_states WHERE created_at < $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "92b0156e95d72192b949765c89601d8d7d0f99c1ab91184473d2afcd06c287e9" +} diff --git a/api/src/api/mod.rs b/api/src/api/mod.rs index 20b73573b..1bac906f5 100644 --- a/api/src/api/mod.rs +++ b/api/src/api/mod.rs @@ -2,7 +2,7 @@ mod admin; mod authorization; mod errors; -mod package; +pub mod package; mod publishing_task; mod scope; mod self_user; diff --git a/api/src/api/package.rs b/api/src/api/package.rs index d5bd82a4e..296ad7bc4 100644 --- a/api/src/api/package.rs +++ b/api/src/api/package.rs @@ -118,7 +118,7 @@ use super::ApiUpdatePackageGithubRepositoryRequest; use super::ApiUpdatePackageRequest; use super::ApiUpdatePackageVersionRequest; -const MAX_PUBLISH_TARBALL_SIZE: u64 = 20 * 1024 * 1024; // 20mb +pub const MAX_PUBLISH_TARBALL_SIZE: u64 = 20 * 1024 * 1024; // 20mb pub struct PublishQueue(pub Option); diff --git a/api/src/db/database.rs b/api/src/db/database.rs index f1a484a16..61523fdc3 100644 --- a/api/src/db/database.rs +++ b/api/src/db/database.rs @@ -3480,6 +3480,20 @@ impl Database { .await } + #[instrument(name = "Database::delete_expired_oauth_states", skip(self), err)] + pub async fn delete_expired_oauth_states( + &self, + older_than: DateTime<Utc>, + ) -> Result<u64> { + let result = sqlx::query!( + "DELETE FROM oauth_states WHERE created_at < $1", + older_than + ) +
.execute(&self.pool) + .await?; + Ok(result.rows_affected()) + } + #[instrument(name = "Database::insert_oauth_state", skip( self, new_oauth_state diff --git a/api/src/publish.rs b/api/src/publish.rs index d10c02416..435a143b5 100644 --- a/api/src/publish.rs +++ b/api/src/publish.rs @@ -455,6 +455,7 @@ async fn upload_npm_version_manifest( pub mod tests { use super::*; use crate::api::ApiPublishingTask; + use crate::api::package::MAX_PUBLISH_TARBALL_SIZE; use crate::db::CreatePackageResult; use crate::db::CreatePublishingTaskResult; use crate::db::NewPublishingTask; @@ -615,7 +616,7 @@ pub mod tests { #[tokio::test] async fn payload_too_large() { - let body = Body::from(vec![0; 999999999]); + let body = Body::from(vec![0; MAX_PUBLISH_TARBALL_SIZE as usize + 10]); let mut t = TestSetup::new().await; let mut resp = t @@ -638,7 +639,7 @@ pub mod tests { async fn payload_too_large_stream() { // Convert the Vec into a hyper Body with chunked transfer encoding let body = Body::wrap_stream(tokio_stream::once(Ok::<_, std::io::Error>( - vec![0; 999999999], + vec![0; MAX_PUBLISH_TARBALL_SIZE as usize + 10], ))); let mut t = TestSetup::new().await; diff --git a/api/src/tasks.rs b/api/src/tasks.rs index 32b608b3d..f246b9cf2 100644 --- a/api/src/tasks.rs +++ b/api/src/tasks.rs @@ -1,6 +1,7 @@ // Copyright 2024 the JSR authors. All rights reserved. MIT license. 
use bytes::Bytes; use chrono::DateTime; +use chrono::Duration; use chrono::Utc; use deno_semver::StackString; use deno_semver::VersionReq; @@ -74,6 +75,10 @@ pub fn tasks_router() -> Router<Body, ApiError> { "/scrape_download_counts", util::json(scrape_download_counts_handler), ) + .post( + "/clean_oauth_states", + util::json(clean_oauth_states_handler), + ) .build() .unwrap() } @@ -475,6 +480,15 @@ ORDER BY Ok(()) } +#[instrument(name = "POST /tasks/clean_oauth_states", skip(req), err)] +pub async fn clean_oauth_states_handler(req: Request<Body>) -> ApiResult<()> { + let db = req.data::<Database>().unwrap().clone(); + let cutoff = Utc::now() - Duration::hours(1); + let deleted = db.delete_expired_oauth_states(cutoff).await?; + tracing::info!(deleted, "cleaned up expired oauth states"); + Ok(()) +} + async fn insert_analytics_download_entries( db: &Database, records: Vec, diff --git a/api/src/util.rs b/api/src/util.rs index 2d895eb32..54c090a4d 100644 --- a/api/src/util.rs +++ b/api/src/util.rs @@ -504,6 +504,8 @@ pub mod test { use crate::util::LicenseStore; static SERVERS_STARTED: std::sync::OnceLock<()> = std::sync::OnceLock::new(); + static LICENSE_STORE: std::sync::OnceLock<LicenseStore> = + std::sync::OnceLock::new(); static TEST_INSTANCE_COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0); @@ -676,7 +678,8 @@ pub mod test { db.add_bad_word_for_test("somebadword").await.unwrap(); - let license_store = super::license_store(); + let license_store = + LICENSE_STORE.get_or_init(super::license_store).clone(); let router = crate::main_router(MainRouterOptions { database: db, diff --git a/terraform/scheduler.tf b/terraform/scheduler.tf index 85ef80bcd..b99dedad3 100644 --- a/terraform/scheduler.tf +++ b/terraform/scheduler.tf @@ -14,6 +14,21 @@ resource "google_cloud_scheduler_job" "npm_tarball_rebuild_missing" { } } +resource "google_cloud_scheduler_job" "clean_oauth_states" { + name = "clean-oauth-states" + description = "Delete expired OAuth states older than 1 hour."
+ schedule = "0 0 * * *" + region = "us-central1" + + http_target { + http_method = "POST" + uri = "${google_cloud_run_v2_service.registry_api_tasks.uri}/tasks/clean_oauth_states" + oidc_token { + service_account_email = google_service_account.task_dispatcher.email + } + } +} + resource "google_cloud_scheduler_job" "scrape_download_counts" { name = "scrape-download-counts" description = "Scrape download counts from BigQuery & Analytics Engine and insert them into Postgres."